42 Commits

Author SHA1 Message Date
eb84adb4a3 Reduce Boilerplate 2023-04-24 19:04:44 +02:00
25c4e97f71 Merge branch 'chaoscaot/support_sponge_v1_v3' 2023-04-23 12:19:22 +02:00
7d9e7f28a9 Fix SQL 2023-04-23 12:17:59 +02:00
ef1f8ed38f Merge pull request #8 from Chaoscaot/chaoscaot/support_sponge_v1_v3
Add Support for Sponge Schematic V1 and V3
2023-04-22 21:55:51 +02:00
4671f38591 Bump Version 2023-04-22 21:55:08 +02:00
5cff84538d Fix Performance 2023-04-22 21:29:18 +02:00
9a0b0535c6 remove Serde 2023-04-22 21:03:00 +02:00
a47c2f44bd Something™️ 2023-04-22 16:39:21 +02:00
246927d840 idk what im doing 2023-04-14 17:56:28 +02:00
d1a01dc0c1 Faster but not working 2023-04-13 23:49:39 +02:00
e03a805bdb Something Working :D 2023-04-13 23:16:12 +02:00
9cca860db3 Some new Ideas 2023-04-13 16:16:02 +02:00
80f5191ae8 Merge branch 'master' into chaoscaot/support_sponge_v1_v3 2023-04-13 14:33:59 +02:00
3f20cbc17f Create CODE_OF_CONDUCT.md 2023-04-13 00:25:42 +02:00
733aaa9e72 Update dependabot.yml 2023-04-13 00:21:24 +02:00
14866df17d Create dependabot.yml 2023-04-13 00:20:53 +02:00
00e3d6fd0f Fix Cache 2023-04-05 13:07:14 +02:00
fb8f935617 Fix Cache and Bump Version 2023-04-05 13:05:15 +02:00
2a112ac49c Add Output Limit 2023-04-05 02:43:28 +02:00
e7c1fd1ef7 Fixing Something? 2023-04-05 00:33:21 +02:00
80eeaad5d5 Add output for machines 2023-04-04 22:38:02 +02:00
64158cf45b Remove Timer from Progressbar 2023-04-04 21:44:43 +02:00
e4b26755ea Revert "Print Progressbar to stdout"
This reverts commit 5607dcc72c.
2023-04-04 17:34:12 +02:00
5607dcc72c Print Progressbar to stdout 2023-04-04 17:29:04 +02:00
5c9bcfc2ec Add SQL to Makefile 2023-04-04 16:31:48 +02:00
a1b5449f06 Some basic tests and basic impls 2023-04-04 12:07:33 +02:00
1df33249c4 Add Makefile for easier building 2023-04-04 00:36:40 +02:00
ef2755115c Fix tests 2023-04-01 11:14:44 +02:00
b32aac0aba Fix naming and Tests 2023-04-01 11:08:57 +02:00
a9a3e70aef Update Roadmap 2023-04-01 11:07:12 +02:00
c477a52f92 Slowdown ProgressBar and add Stderr as output 2023-04-01 11:02:49 +02:00
818de6be47 Abstractions 2023-04-01 10:30:25 +02:00
8f15b42146 Add Issue Templates 2023-03-21 18:31:50 +01:00
b8d912881d Fix SQL-Interface 2023-03-19 21:18:40 +01:00
02404792a5 Bump Version 2023-03-19 11:59:39 +01:00
59272ed3e7 Add Complex Output Patterns 2023-03-19 11:57:22 +01:00
322ba65656 Add Caches 2023-03-18 10:27:34 +01:00
b082d6cd8d Fix Imports 2023-03-16 21:33:23 +01:00
f4bcde73f9 Add Roadmap 2023-03-16 21:09:41 +01:00
0e31714582 Add Roadmap 2023-03-16 21:08:56 +01:00
229c858d9a Optimize Workflows 2023-03-16 20:40:37 +01:00
abf6953172 Optimize Workflows 2023-03-16 20:40:17 +01:00
26 changed files with 803 additions and 225 deletions

.github/ISSUE_TEMPLATE/bug.yml vendored Normal file (+40)

@ -0,0 +1,40 @@
name: Bug Report
description: Create a report to fix a bug
labels: [bug]
title: "[BUG] <title>"
body:
  - type: textarea
    id: description
    attributes:
      label: Description
      description: A clear and concise description of what the bug is.
    validations:
      required: true
  - type: textarea
    id: reproduction
    attributes:
      label: Reproduction
      description: Steps to reproduce the behavior.
    validations:
      required: true
  - type: textarea
    id: expected-behavior
    attributes:
      label: Expected Behavior
      description: A clear and concise description of what you expected to happen.
    validations:
      required: true
  - type: textarea
    id: actual-behavior
    attributes:
      label: Actual Behavior
      description: A clear and concise description of what actually happened.
    validations:
      required: true
  - type: textarea
    id: additional-context
    attributes:
      label: Additional Context
      description: Add any other context about the problem here.
    validations:
      required: false

.github/ISSUE_TEMPLATE/feature.yml vendored Normal file (+35)

@ -0,0 +1,35 @@
name: Feature Request
description: Suggest an idea for this project
title: "[FEATURE] <title>"
labels: [enhancement]
body:
  - type: textarea
    id: description
    attributes:
      label: Description
      description: A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
    validations:
      required: true
  - type: textarea
    id: solution
    attributes:
      label: Proposed Solution
      description: A clear and concise description of what you want to happen.
    validations:
      required: true
  - type: textarea
    id: alternatives
    attributes:
      label: Alternatives
      description: A clear and concise description of any alternative solutions or features you've considered.
    validations:
      required: false
  - type: textarea
    id: additional-context
    attributes:
      label: Additional Context
      description: Add any other context or screenshots about the feature request here.
    validations:
      required: false

.github/dependabot.yml vendored Normal file (+6)

@ -0,0 +1,6 @@
version: 2
updates:
  - package-ecosystem: "cargo"
    directory: "/"
    schedule:
      interval: "weekly"


@ -16,18 +16,41 @@ jobs:
steps:
- uses: actions/checkout@v3
- name: Cache Cargo modules
id: cache-cargo
uses: actions/cache@v3
continue-on-error: false
with:
path: |
~/.cargo/registry
~/.cargo/git
key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
restore-keys: |
${{ runner.os }}-cargo-
- name: Build
run: cargo build --verbose
run: make debug
- name: Run tests
run: cargo test --verbose
build-realease:
run: cargo test --verbose -p schemsearch-lib
build-release:
needs:
- build
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Cache Cargo modules
id: cache-cargo
uses: actions/cache@v3
continue-on-error: false
with:
path: |
~/.cargo/registry
~/.cargo/git
key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
restore-keys: |
${{ runner.os }}-cargo-
- name: Build
run: cargo build --verbose --release
run: make
- name: Upload a Build Artifact
uses: actions/upload-artifact@v3.1.2
with:


@ -19,8 +19,19 @@ jobs:
steps:
- uses: actions/checkout@v3
- name: Cache Cargo modules
id: cache-cargo
uses: actions/cache@v3
continue-on-error: false
with:
path: |
~/.cargo/registry
~/.cargo/git
key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
restore-keys: |
${{ runner.os }}-cargo-
- name: Build
run: cargo build --verbose --release
run: make
- name: Create Tarball
if: ${{ matrix.os != 'windows-latest' }}
run: tar -czvf schemsearch-cli-${{ matrix.os }}.tar.gz -C target/release schemsearch-cli

CODE_OF_CONDUCT.md Normal file (+128)

@ -0,0 +1,128 @@
# Contributor Covenant Code of Conduct
## Our Pledge
We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.
We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.
## Our Standards
Examples of behavior that contributes to a positive environment for our
community include:
* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
* Focusing on what is best not just for us as individuals, but for the
overall community
Examples of unacceptable behavior include:
* The use of sexualized language or imagery, and sexual attention or
advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Enforcement Responsibilities
Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.
Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.
## Scope
This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
chaoscaot@zohomail.eu.
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the
reporter of any incident.
## Enforcement Guidelines
Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:
### 1. Correction
**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.
**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.
### 2. Warning
**Community Impact**: A violation through a single incident or series
of actions.
**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or
permanent ban.
### 3. Temporary Ban
**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.
**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.
### 4. Permanent Ban
**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within
the community.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.0, available at
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
Community Impact Guidelines were inspired by [Mozilla's code of conduct
enforcement ladder](https://github.com/mozilla/diversity).
[homepage]: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see the FAQ at
https://www.contributor-covenant.org/faq. Translations are available at
https://www.contributor-covenant.org/translations.


@ -2,7 +2,7 @@
members = [
"schemsearch-cli",
"schemsearch-lib",
"schemsearch_faster",
"schemsearch-faster",
"schemsearch-files",
"schemsearch-sql",
"schemsearch-java"

Makefile Normal file (+28)

@ -0,0 +1,28 @@
default:
	@echo "Building (Release)...";
	cargo rustc --release --color=always -p schemsearch-cli -- -C target-feature=+avx2

sql:
	@echo "Building (Release)...";
	cargo rustc --release --color=always -p schemsearch-cli --features sql -- -C target-feature=+avx2

debug:
	@echo "Building (Debug)...";
	cargo build -p schemsearch-cli

install: default
	@echo "Installing...";
	install -Dm755 target/release/schemsearch-cli /usr/bin/schemsearch

uninstall:
	@echo "Uninstalling...";
	rm -f /usr/bin/schemsearch

java:
	@echo "Building Java...";
	@echo "WARNING: This is WORK IN PROGRESS!";
	javac SchemSearch.java

clean:
	@echo "Cleaning...";
	cargo clean


@ -5,10 +5,10 @@
## WARNING: This is a work in progress and is really simple right now. It will be improved in the future.
| Feature | Status |
|------------------------|--------|
|---------------------------|--------|
| Block search | ✅ |
| Block data less search | ✅ |
| Tile entities search | ❌ |
| Tile entities data search | ❌ |
| Entities search | ❌ |
---
@ -40,6 +40,16 @@ schemsearch-cli --help
---
## Roadmap
A list of features planned for future implementation, in order of priority.
- [ ] Use AVX2 for faster search
- [ ] Tile entities data search
- [ ] Entities search
- [ ] McEdit Schematic support
---
## Building
This project is built using Rust for the CLI and library. It can be built using Cargo.
```bash


@ -1,6 +1,6 @@
[package]
name = "schemsearch-cli"
version = "0.1.0"
version = "0.1.3"
edition = "2021"
license = "AGPL-3.0-or-later"
@ -16,6 +16,8 @@ futures = { version = "0.3", optional = true }
sqlx = { version = "0.6", features = [ "runtime-async-std-native-tls" , "mysql" ], optional = true }
rayon = "1.7.0"
indicatif = { version = "0.17.3", features = ["rayon"] }
serde = "1.0.157"
serde_json = "1.0.94"
[features]
sql = ["dep:schemsearch-sql", "dep:futures", "dep:sqlx"]


@ -0,0 +1,29 @@
use serde::{Deserialize, Serialize};
use schemsearch_lib::{Match, SearchBehavior};

#[derive(Serialize, Deserialize, Debug)]
#[serde(tag = "event")]
pub enum JsonEvent {
    Found(FoundEvent),
    Init(InitEvent),
    End(EndEvent),
}

#[derive(Serialize, Deserialize, Debug)]
pub struct FoundEvent {
    pub name: String,
    #[serde(flatten, rename = "match")]
    pub match_: Match,
}

#[derive(Serialize, Deserialize, Debug)]
pub struct InitEvent {
    pub total: u32,
    pub search_behavior: SearchBehavior,
    pub start_time: u128,
}

#[derive(Serialize, Deserialize, Debug)]
pub struct EndEvent {
    pub end_time: u128,
}
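
For illustration only, a minimal sketch of what these tagged events serialize to, assuming the types above and the serde_json dependency added to the CLI crate:

```rust
// Sketch: with #[serde(tag = "event")] on the enum and #[serde(flatten)] on the
// embedded Match, every event becomes one self-describing JSON object per line.
// Assumes JsonEvent/FoundEvent/EndEvent from this module are in scope.
fn print_example_events() {
    let found = JsonEvent::Found(FoundEvent {
        name: "Pattern.schem".to_string(),
        match_: schemsearch_lib::Match { x: 1, y: 0, z: 3, percent: 1.0 },
    });
    // e.g. {"event":"Found","name":"Pattern.schem","x":1,"y":0,"z":3,"percent":1.0}
    println!("{}", serde_json::to_string(&found).unwrap());

    let end = JsonEvent::End(EndEvent { end_time: 1234 });
    // e.g. {"event":"End","end_time":1234}
    println!("{}", serde_json::to_string(&end).unwrap());
}
```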


@ -16,16 +16,17 @@
*/
mod types;
mod json_output;
mod sinks;
mod stderr;
use std::fmt::Debug;
use std::fs::File;
use std::io;
use std::io::{BufWriter, Write};
use std::io::Write;
use clap::{command, Arg, ArgAction, ValueHint};
use schemsearch_files::Schematic;
use std::path::PathBuf;
use std::str::FromStr;
use clap::error::ErrorKind;
use schemsearch_lib::{search, SearchBehavior};
use schemsearch_lib::{Match, search, SearchBehavior};
use crate::types::{PathSchematicSupplier, SchematicSupplierType};
#[cfg(feature = "sql")]
use futures::executor::block_on;
@ -37,7 +38,10 @@ use schemsearch_sql::filter::SchematicFilter;
use schemsearch_sql::load_all_schematics;
#[cfg(feature = "sql")]
use crate::types::SqlSchematicSupplier;
use indicatif::{ProgressBar, ParallelProgressIterator, ProgressStyle};
use indicatif::*;
use schemsearch_files::SpongeSchematic;
use crate::sinks::{OutputFormat, OutputSink};
use crate::stderr::MaschineStdErr;
fn main() {
#[allow(unused_mut)]
@ -92,20 +96,32 @@ fn main() {
)
.arg(
Arg::new("output")
.help("The output format")
.help("The output format and path [Format:Path] available formats: text, json, csv; available paths: std, err, (file path)")
.short('o')
.long("output")
.action(ArgAction::Append)
.default_value("std")
.value_parser(["std_csv", "file_csv", "std", "file"]),
)
.arg(
Arg::new("output-file")
.help("The output file")
.short('O')
.long("output-file")
.value_hint(ValueHint::FilePath)
.action(ArgAction::Append)
.default_value("text:std")
.value_parser(|s: &str| {
let mut split = s.splitn(2, ':');
let format = match split.next() {
None => return Err("No format specified".to_string()),
Some(x) => x
};
let path = match split.next() {
None => return Err("No path specified".to_string()),
Some(x) => x
};
let format = match OutputFormat::from_str(format) {
Ok(x) => x,
Err(e) => return Err(e.to_string()),
};
let path = match OutputSink::from_str(path) {
Ok(x) => x,
Err(e) => return Err(e.to_string()),
};
Ok((format, path))
}),
)
.arg(
Arg::new("threshold")
@ -125,6 +141,24 @@ fn main() {
.default_value("0")
.value_parser(|s: &str| s.parse::<usize>().map_err(|e| e.to_string())),
)
.arg(
Arg::new("machine")
.help("Output for machines")
.short('m')
.long("machine")
.action(ArgAction::Set)
.default_value("0")
.value_parser(|s: &str| s.parse::<u16>().map_err(|e| e.to_string()))
)
.arg(
Arg::new("limit")
.help("The maximum number of matches to return [0 = Unlimited]")
.short('l')
.long("limit")
.action(ArgAction::Set)
.default_value("50")
.value_parser(|s: &str| s.parse::<usize>().map_err(|e| e.to_string())),
)
.about("Searches for a pattern in a schematic")
.bin_name("schemsearch");
@ -172,7 +206,7 @@ fn main() {
threshold: *matches.get_one::<f32>("threshold").expect("Couldn't get threshold"),
};
let pattern = match Schematic::load(&PathBuf::from(matches.get_one::<String>("pattern").unwrap())) {
let pattern = match SpongeSchematic::load(&PathBuf::from(matches.get_one::<String>("pattern").unwrap())) {
Ok(x) => x,
Err(e) => {
cmd.error(ErrorKind::Io, format!("Error while loading Pattern: {}", e.to_string())).exit();
@ -223,52 +257,35 @@ fn main() {
cmd.error(ErrorKind::MissingRequiredArgument, "No schematics specified").exit();
}
let mut output_std = false;
let mut output_std_csv = false;
let mut output_file_csv = false;
let mut output_file = false;
let output: Vec<&(OutputFormat, OutputSink)> = matches.get_many::<(OutputFormat, OutputSink)>("output").expect("Error").collect();
let mut output: Vec<(OutputFormat, Box<dyn Write>)> = output.into_iter().map(|x| (x.0.clone(), x.1.output())).collect();
for x in matches.get_many::<String>("output").expect("Couldn't get output") {
match x.as_str() {
"std" => output_std = true,
"std_csv" => output_std_csv = true,
"file_csv" => output_file_csv = true,
"file" => output_file = true,
_ => {}
for x in &mut output {
write!(x.1, "{}", x.0.start(schematics.len() as u32, &search_behavior, start.elapsed().as_millis())).unwrap();
}
};
let file: Option<File>;
let mut file_out: Option<BufWriter<File>> = None;
if output_file || output_file_csv {
let output_file_path = match matches.get_one::<String>("output-file") {
None => {
cmd.error(ErrorKind::MissingRequiredArgument, "No output file specified").exit();
}
Some(x) => x
};
file = match File::create(output_file_path) {
Ok(x) => Some(x),
Err(e) => {
cmd.error(ErrorKind::Io, format!("Error while creating output file: {}", e.to_string())).exit();
}
};
file_out = Some(BufWriter::new(file.unwrap()));
}
ThreadPoolBuilder::new().num_threads(*matches.get_one::<usize>("threads").expect("Could not get threads")).build_global().unwrap();
let matches: Vec<Result> = schematics.par_iter().progress_with_style(ProgressStyle::with_template("[{elapsed}, ETA: {eta}] {wide_bar} {pos}/{len} {per_sec}").unwrap()).map(|schem| {
let bar = ProgressBar::new(schematics.len() as u64); // "maschine"
bar.set_style(ProgressStyle::with_template("[{elapsed}, ETA: {eta}] {wide_bar} {pos}/{len} {per_sec}").unwrap());
let term_size = *matches.get_one::<u16>("machine").expect("Could not get machine");
if term_size != 0 {
bar.set_draw_target(ProgressDrawTarget::term_like(Box::new(MaschineStdErr { size: term_size })))
}
let max_matching = *matches.get_one::<usize>("limit").expect("Could not get max-matching");
let matches: Vec<SearchResult> = schematics.par_iter().progress_with(bar).map(|schem| {
match schem {
SchematicSupplierType::PATH(schem) => {
let schematic = match load_schem(&schem.path) {
Some(x) => x,
None => return Result {
None => return SearchResult {
name: schem.get_name(),
matches: vec![]
matches: Vec::default()
}
};
Result {
SearchResult {
name: schem.get_name(),
matches: search(schematic, &pattern, search_behavior)
}
@ -277,18 +294,16 @@ fn main() {
SchematicSupplierType::SQL(schem) => {
match schem.get_schematic() {
Ok(schematic) => {
Result {
SearchResult {
name: schem.get_name(),
matches: search(schematic, &pattern, search_behavior)
}
}
Err(e) => {
if !output_std && !output_std_csv {
println!("Error while loading schematic ({}): {}", schem.get_name(), e.to_string());
}
Result {
eprintln!("Error while loading schematic ({}): {}", schem.get_name(), e.to_string());
SearchResult {
name: schem.get_name(),
matches: vec![]
matches: Vec::default()
}
}
}
@ -296,34 +311,31 @@ fn main() {
}
}).collect();
let stdout = io::stdout();
let mut lock = stdout.lock();
let mut matches_count = 0;
for matching in matches {
'outer: for matching in matches {
let schem_name = matching.name;
let matching = matching.matches;
for x in matching {
if output_std {
writeln!(lock, "Found match in '{}' at x: {}, y: {}, z: {}, % = {}", schem_name, x.0, x.1, x.2, x.3).unwrap();
for out in &mut output {
write!(out.1, "{}", out.0.found_match(&schem_name, x)).unwrap();
}
if output_std_csv {
writeln!(lock, "{},{},{},{},{}", schem_name, x.0, x.1, x.2, x.3).unwrap();
}
if output_file {
writeln!(file_out.as_mut().unwrap(), "Found match in '{}' at x: {}, y: {}, z: {}, % = {}", schem_name, x.0, x.1, x.2, x.3).unwrap();
}
if output_file_csv {
writeln!(file_out.as_mut().unwrap(), "{},{},{},{},{}", schem_name, x.0, x.1, x.2, x.3).unwrap();
matches_count += 1;
if max_matching != 0 && matches_count >= max_matching {
break 'outer;
}
}
}
let end = std::time::Instant::now();
println!("Finished in {:.2}s! Searched in {} Schematics", end.duration_since(start).as_secs_f32(), schematics.len());
for x in &mut output {
write!(x.1, "{}", x.0.end(end.duration_since(start).as_millis())).unwrap();
x.1.flush().unwrap();
}
}
fn load_schem(schem_path: &PathBuf) -> Option<Schematic> {
match Schematic::load(schem_path) {
fn load_schem(schem_path: &PathBuf) -> Option<SpongeSchematic> {
match SpongeSchematic::load(schem_path) {
Ok(x) => Some(x),
Err(e) => {
println!("Error while loading schematic ({}): {}", schem_path.to_str().unwrap(), e.to_string());
@ -333,8 +345,8 @@ fn load_schem(schem_path: &PathBuf) -> Option<Schematic> {
}
#[derive(Debug, Clone)]
struct Result {
struct SearchResult {
name: String,
matches: Vec<(u16, u16, u16, f32)>,
matches: Vec<Match>,
}
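
Each `-o`/`--output` value is a `format:path` pair such as `text:std`, `json:err`, or `csv:results.csv`. A standalone sketch of the split performed by the value parser above; `split_output_spec` is a hypothetical name, not a function in the crate:

```rust
// Illustrative only: how a "format:path" argument is split before the format
// and sink are parsed with FromStr.
fn split_output_spec(s: &str) -> Result<(&str, &str), String> {
    let mut parts = s.splitn(2, ':');
    let format = parts.next().ok_or_else(|| "No format specified".to_string())?;
    let path = parts.next().ok_or_else(|| "No path specified".to_string())?;
    Ok((format, path))
}

fn main() {
    assert_eq!(split_output_spec("json:results.json"), Ok(("json", "results.json")));
    assert_eq!(split_output_spec("csv:std"), Ok(("csv", "std")));
    assert!(split_output_spec("text").is_err()); // no ':' means no path
}
```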


@ -0,0 +1,90 @@
use std::fs::File;
use std::io::BufWriter;
use std::str::FromStr;
use std::io::Write;
use std::time::Duration;
use indicatif::HumanDuration;
use schemsearch_lib::{Match, SearchBehavior};
use crate::json_output::{EndEvent, FoundEvent, InitEvent, JsonEvent};

#[derive(Debug, Clone)]
pub enum OutputSink {
    Stdout,
    Stderr,
    File(String),
}

#[derive(Debug, Clone)]
pub enum OutputFormat {
    Text,
    CSV,
    JSON
}

impl FromStr for OutputFormat {
    type Err = String;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "text" => Ok(OutputFormat::Text),
            "csv" => Ok(OutputFormat::CSV),
            "json" => Ok(OutputFormat::JSON),
            _ => Err(format!("'{}' is not a valid output format", s))
        }
    }
}

impl FromStr for OutputSink {
    type Err = String;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "std" => Ok(OutputSink::Stdout),
            "err" => Ok(OutputSink::Stderr),
            _ => Ok(OutputSink::File(s.to_string()))
        }
    }
}

impl OutputSink {
    pub fn output(&self) -> Box<dyn Write> {
        match self {
            OutputSink::Stdout => Box::new(std::io::stdout()),
            OutputSink::Stderr => Box::new(std::io::stderr()),
            OutputSink::File(path) => Box::new(BufWriter::new(File::create(path).unwrap()))
        }
    }
}

impl OutputFormat {
    pub fn found_match(&self, name: &String, pos: Match) -> String {
        match self {
            OutputFormat::Text => format!("Found match in '{}' at x: {}, y: {}, z: {}, % = {}\n", name, pos.x, pos.y, pos.z, pos.percent),
            OutputFormat::CSV => format!("{},{},{},{},{}\n", name, pos.x, pos.y, pos.z, pos.percent),
            OutputFormat::JSON => format!("{}\n", serde_json::to_string(&JsonEvent::Found(FoundEvent {
                name: name.clone(),
                match_: pos,
            })).unwrap())
        }
    }

    pub fn start(&self, total: u32, search_behavior: &SearchBehavior, start_time: u128) -> String {
        match self {
            OutputFormat::Text => format!("Starting search in {} schematics\n", total),
            OutputFormat::CSV => format!("Name,X,Y,Z,Percent\n"),
            OutputFormat::JSON => format!("{}\n", serde_json::to_string(&JsonEvent::Init(InitEvent {
                total,
                search_behavior: search_behavior.clone(),
                start_time,
            })).unwrap())
        }
    }

    pub fn end(&self, end_time: u128) -> String {
        match self {
            OutputFormat::Text => format!("Search complete in {}\n", HumanDuration(Duration::from_millis(end_time as u64))),
            OutputFormat::CSV => format!("{}\n", end_time),
            OutputFormat::JSON => format!("{}\n", serde_json::to_string(&JsonEvent::End(EndEvent{ end_time })).unwrap())
        }
    }
}
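
A minimal usage sketch, assuming the OutputFormat/OutputSink definitions above and schemsearch_lib::Match; it mirrors how main.rs wires the parsed pairs, but is illustrative only:

```rust
use std::io::Write;
use std::str::FromStr;
use schemsearch_lib::Match;

// Sketch: parse a format and a sink, then emit one match line through the sink.
fn demo() -> Result<(), String> {
    let format = OutputFormat::from_str("csv")?;
    let sink = OutputSink::from_str("err")?; // "std", "err", or any file path
    let mut out: Box<dyn Write> = sink.output();
    let line = format.found_match(
        &"Random.schem".to_string(),
        Match { x: 1, y: 0, z: 3, percent: 1.0 },
    );
    out.write_all(line.as_bytes()).map_err(|e| e.to_string())?;
    out.flush().map_err(|e| e.to_string())
}
```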


@ -0,0 +1,44 @@
use std::fmt::Debug;
use std::io::Write;
use indicatif::TermLike;

#[derive(Debug)]
pub struct MaschineStdErr { pub(crate) size: u16}

impl TermLike for MaschineStdErr {
    fn width(&self) -> u16 {
        self.size
    }

    fn move_cursor_up(&self, _: usize) -> std::io::Result<()> {
        Ok(())
    }

    fn move_cursor_down(&self, _: usize) -> std::io::Result<()> {
        Ok(())
    }

    fn move_cursor_right(&self, _: usize) -> std::io::Result<()> {
        Ok(())
    }

    fn move_cursor_left(&self, _: usize) -> std::io::Result<()> {
        Ok(())
    }

    fn write_line(&self, s: &str) -> std::io::Result<()> {
        writeln!(std::io::stderr(), "{}", s)
    }

    fn write_str(&self, s: &str) -> std::io::Result<()> {
        write!(std::io::stderr(), "{}", s)
    }

    fn clear_line(&self) -> std::io::Result<()> {
        Ok(())
    }

    fn flush(&self) -> std::io::Result<()> {
        std::io::stderr().flush()
    }
}
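
MaschineStdErr is a minimal indicatif TermLike that reports a fixed width and redraws by printing plain lines to stderr, which is what `--machine <width>` relies on. A sketch of the wiring, mirroring the ProgressBar setup in main.rs:

```rust
use indicatif::{ProgressBar, ProgressDrawTarget, ProgressStyle};

// Sketch: a progress bar that draws line-by-line to stderr at a fixed width,
// as used for `--machine <width>`. Assumes MaschineStdErr from this module.
fn machine_progress(len: u64, width: u16) -> ProgressBar {
    let bar = ProgressBar::new(len);
    bar.set_style(
        ProgressStyle::with_template("[{elapsed}, ETA: {eta}] {wide_bar} {pos}/{len} {per_sec}").unwrap(),
    );
    bar.set_draw_target(ProgressDrawTarget::term_like(Box::new(MaschineStdErr { size: width })));
    bar
}
```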


@ -15,10 +15,13 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
#[cfg(feature = "sql")]
use std::io::Cursor;
use std::path::PathBuf;
#[cfg(feature = "sql")]
use futures::executor::block_on;
use schemsearch_files::Schematic;
#[cfg(feature = "sql")]
use schemsearch_files::SpongeSchematic;
#[cfg(feature = "sql")]
use schemsearch_sql::{load_schemdata, SchematicNode};
@ -45,9 +48,9 @@ pub struct SqlSchematicSupplier {
#[cfg(feature = "sql")]
impl SqlSchematicSupplier {
pub fn get_schematic(&self) -> Result<Schematic, String> {
let schemdata = block_on(load_schemdata(self.node.id));
Schematic::load_data(schemdata.as_slice())
pub fn get_schematic(&self) -> Result<SpongeSchematic, String> {
let mut schemdata = block_on(load_schemdata(self.node.id));
SpongeSchematic::load_data(&mut Cursor::new(schemdata.as_mut_slice()))
}
pub fn get_name(&self) -> String {


@ -1,6 +1,6 @@
[package]
name = "schemsearch_faster"
version = "0.1.0"
version = "0.1.3"
edition = "2021"
license = "AGPL-3.0-or-later"


@ -16,9 +16,9 @@
*/
use nbt::Map;
use schemsearch_files::Schematic;
use schemsearch_files::SpongeV2Schematic;
pub fn convert_to_search_space(schem: &Schematic, palette: &Vec<String>) -> Vec<Vec<u8>> {
pub fn convert_to_search_space(schem: &SpongeV2Schematic, palette: &Vec<String>) -> Vec<Vec<u8>> {
let mut data: Vec<Vec<u8>> = Vec::with_capacity(palette.len());
let block_data = &schem.block_data;
for name in palette {
@ -48,26 +48,26 @@ pub fn unwrap_palette(palette: &Map<String, i32>) -> Vec<String> {
#[cfg(test)]
mod tests {
use std::path::{Path, PathBuf};
use schemsearch_files::Schematic;
use schemsearch_files::SpongeV2Schematic;
use crate::{convert_to_search_space, unwrap_palette};
//#[test]
pub fn test() {
let schematic = Schematic::load(&PathBuf::from("../tests/Pattern.schem")).unwrap();
let schematic = SpongeV2Schematic::load(&PathBuf::from("../tests/Pattern.schem")).unwrap();
dbg!(convert_to_search_space(&schematic, &unwrap_palette(&schematic.palette)));
}
//#[test]
pub fn test_2() {
let schematic = Schematic::load(&PathBuf::from("../tests/Pattern.schem")).unwrap();
let schematic2 = Schematic::load(&PathBuf::from("../tests/Random.schem")).unwrap();
let schematic = SpongeV2Schematic::load(&PathBuf::from("../tests/Pattern.schem")).unwrap();
let schematic2 = SpongeV2Schematic::load(&PathBuf::from("../tests/Random.schem")).unwrap();
println!("{:?}", convert_to_search_space(&schematic2, &unwrap_palette(&schematic.palette)));
}
//#[test]
pub fn test_big() {
let schematic = Schematic::load(&PathBuf::from("../tests/endstone.schem")).unwrap();
let schematic2 = Schematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
let schematic = SpongeV2Schematic::load(&PathBuf::from("../tests/endstone.schem")).unwrap();
let schematic2 = SpongeV2Schematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
let _ = convert_to_search_space(&schematic2, &unwrap_palette(&schematic.palette));
}
}


@ -1,10 +1,11 @@
[package]
name = "schemsearch-files"
version = "0.1.0"
version = "0.1.3"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
hematite-nbt = "0.5.2"
serde = "1.0.152"
flate2 = "1.0.25"
named-binary-tag = "0.6"


@ -15,81 +15,166 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use std::collections::hash_map::HashMap;
use std::io::Read;
use std::path::PathBuf;
use nbt::{Map, Value};
use serde::{Deserialize, Deserializer, Serialize};
use nbt::{CompoundTag, Tag};
#[derive(Serialize, Deserialize, Debug)]
pub struct Schematic {
#[serde(rename = "Version")]
pub version: i32,
#[serde(rename = "DataVersion")]
#[derive(Clone, Debug)]
pub struct SpongeSchematic {
pub data_version: i32,
#[serde(rename = "Metadata")]
pub metadata: Map<String, Value>,
#[serde(rename = "Width")]
pub metadata: CompoundTag,
pub width: u16,
#[serde(rename = "Height")]
pub height: u16,
#[serde(rename = "Length")]
pub length: u16,
#[serde(rename = "Offset")]
pub offset: [i32; 3],
#[serde(rename = "PaletteMax")]
pub palette_max: i32,
#[serde(rename = "Palette")]
pub palette: Map<String, i32>,
#[serde(rename = "BlockData", deserialize_with = "read_blockdata")]
pub palette: HashMap<String, i32>,
pub block_data: Vec<i32>,
#[serde(rename = "BlockEntities")]
pub block_entities: Vec<BlockEntity>,
#[serde(rename = "Entities")]
pub entities: Option<Vec<Entity>>,
}
fn read_blockdata<'de, D>(deserializer: D) -> Result<Vec<i32>, D::Error>
where
D: Deserializer<'de>,
{
let s: Vec<i8> = Deserialize::deserialize(deserializer)?;
Ok(read_varint_array(&s))
#[derive(Clone, Debug)]
pub struct BlockContainer {
pub palette: HashMap<String, i32>,
pub block_data: Vec<i32>,
pub block_entities: Vec<BlockEntity>,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
#[derive(Debug, Clone)]
pub struct BlockEntity {
#[serde(rename = "Id")]
pub id: String,
#[serde(rename = "Pos")]
pub pos: [i32; 3],
}
#[derive(Serialize, Deserialize, Debug, Clone)]
#[derive(Debug, Clone)]
pub struct BlockEntityV3 {
pub id: String,
pub pos: [i32; 3],
pub data: HashMap<String, Tag>,
}
#[derive(Debug, Clone)]
pub struct Entity {
#[serde(rename = "Id")]
pub id: String,
#[serde(rename = "Pos")]
pub pos: [i32; 3],
}
impl Schematic {
pub fn load_data<R>(data: R) -> Result<Schematic, String> where R: Read {
let schematic: Schematic = match nbt::from_gzip_reader(data) {
Ok(schem) => schem,
Err(e) => return Err(format!("Failed to parse schematic: {}", e))
};
Ok(schematic)
}
impl SpongeSchematic {
pub fn load_data<R>(data: &mut R) -> Result<SpongeSchematic, String> where R: Read {
let nbt: CompoundTag = nbt::decode::read_gzip_compound_tag(data).map_err(|e| e.to_string())?;
let version = nbt.get_i32("Version").map_err(|e| e.to_string())?;
pub fn load(path: &PathBuf) -> Result<Schematic, String> {
let file = match std::fs::File::open(path) {
Ok(x) => x,
Err(_) => return Err(format!("Failed to open file: {}", path.to_str().unwrap()))
};
Schematic::load_data(file)
match version {
1 => SpongeSchematic::from_nbt_1(nbt),
2 => SpongeSchematic::from_nbt_2(nbt),
3 => SpongeSchematic::from_nbt_3(nbt),
_ => Err("Invalid schematic: Unknown Version".to_string()),
}
}
pub fn load(path: &PathBuf) -> Result<SpongeSchematic, String> {
let mut file = std::fs::File::open(path).map_err(|e| e.to_string())?;
Self::load_data(&mut file)
}
pub fn from_nbt_1(nbt: CompoundTag) -> Result<Self, String> {
Ok(Self {
data_version: 0,
metadata: nbt.get_compound_tag("Metadata").map_err(|e| e.to_string())?.clone(),
width: nbt.get_i16("Width").map_err(|e| e.to_string())? as u16,
height: nbt.get_i16("Height").map_err(|e| e.to_string())? as u16,
length: nbt.get_i16("Length").map_err(|e| e.to_string())? as u16,
offset: read_offset(nbt.get_i32_vec("Offset").map_err(|e| e.to_string())?)?,
palette_max: nbt.get_i32("PaletteMax").map_err(|e| e.to_string())?,
palette: read_palette(nbt.get_compound_tag("Palette").map_err(|e| e.to_string())?),
block_data: read_blocks(nbt.get_i8_vec("BlockData").map_err(|e| e.to_string())?),
block_entities: read_tile_entities(nbt.get_compound_tag_vec("TileEntities").map_err(|e| e.to_string())?)?,
entities: None,
})
}
pub fn from_nbt_2(nbt: CompoundTag) -> Result<Self, String> {
Ok(Self{
data_version: nbt.get_i32("DataVersion").map_err(|e| e.to_string())?,
metadata: nbt.get_compound_tag("Metadata").map_err(|e| e.to_string())?.clone(),
width: nbt.get_i16("Width").map_err(|e| e.to_string())? as u16,
height: nbt.get_i16("Height").map_err(|e| e.to_string())? as u16,
length: nbt.get_i16("Length").map_err(|e| e.to_string())? as u16,
offset: read_offset(nbt.get_i32_vec("Offset").map_err(|e| e.to_string())?)?,
palette_max: nbt.get_i32("PaletteMax").map_err(|e| e.to_string())?,
palette: read_palette(nbt.get_compound_tag("Palette").map_err(|e| e.to_string())?),
block_data: read_blocks(nbt.get_i8_vec("BlockData").map_err(|e| e.to_string())?),
block_entities: read_tile_entities(nbt.get_compound_tag_vec("BlockEntities").map_err(|e| e.to_string())?)?,
entities: None,
})
}
pub fn from_nbt_3(nbt: CompoundTag) -> Result<Self, String> {
let blocks = nbt.get_compound_tag("Blocks").map_err(|e| e.to_string())?;
Ok(Self{
data_version: nbt.get_i32("DataVersion").map_err(|e| e.to_string())?,
metadata: nbt.get_compound_tag("Metadata").map_err(|e| e.to_string())?.clone(),
width: nbt.get_i16("Width").map_err(|e| e.to_string())? as u16,
height: nbt.get_i16("Height").map_err(|e| e.to_string())? as u16,
length: nbt.get_i16("Length").map_err(|e| e.to_string())? as u16,
offset: read_offset(nbt.get_i32_vec("Offset").map_err(|e| e.to_string())?)?,
palette_max: compute_palette_max(blocks.get_compound_tag("Palette").map_err(|e| e.to_string())?),
palette: read_palette(blocks.get_compound_tag("Palette").map_err(|e| e.to_string())?),
block_data: read_blocks(blocks.get_i8_vec("BlockData").map_err(|e| e.to_string())?),
block_entities: read_tile_entities(blocks.get_compound_tag_vec("BlockEntities").map_err(|e| e.to_string())?)?,
entities: None,
})
}
}
fn read_tile_entities(tag: Vec<&CompoundTag>) -> Result<Vec<BlockEntity>, String> {
let mut tile_entities = Vec::new();
for t in tag {
tile_entities.push(BlockEntity {
id: t.get_str("Id").map_err(|e| e.to_string())?.to_string(),
pos: read_offset(t.get("Pos").map_err(|e| e.to_string())?)?,
});
}
Ok(tile_entities)
}
#[inline]
fn read_offset(offset: &Vec<i32>) -> Result<[i32; 3], String> {
match offset.len() {
3 => Ok([offset[0], offset[1], offset[2]]),
_ => Err("Invalid schematic: read_offset wrong length".to_string()),
}
}
#[inline]
fn read_palette(p: &CompoundTag) -> HashMap<String, i32> {
let mut palette = HashMap::new();
for (key, value) in p.iter() {
match value {
Tag::Int(n) => { palette.insert(key.clone(), *n); },
_ => {},
};
}
palette
}
#[inline]
fn compute_palette_max(palette: &CompoundTag) -> i32 {
palette.iter().map(|(_, v)| v).filter_map(|v| match v {
Tag::Int(n) => Some(*n),
_ => None,
}).max().unwrap_or(0)
}
#[inline]
fn read_blocks(blockdata: &Vec<i8>) -> Vec<i32> {
read_varint_array(blockdata)
}
#[inline]
pub fn read_varint_array(read: &Vec<i8>) -> Vec<i32> {
let mut data = Vec::new();
let mut value: i32 = 0;
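
BlockData is stored as a VarInt array: each entry is a little-endian base-128 varint spread across the NBT byte array. A minimal standalone decoder sketch (illustrative, not necessarily identical to read_varint_array):

```rust
// Sketch: decode a VarInt-encoded block array. Bit 7 of each byte marks
// "more bytes follow"; the low 7 bits carry the value, least significant first.
fn decode_varint_array(bytes: &[i8]) -> Vec<i32> {
    let mut out = Vec::new();
    let mut value: i32 = 0;
    let mut shift = 0u32;
    for &b in bytes {
        let b = b as u8;
        value |= ((b & 0x7F) as i32) << shift;
        if b & 0x80 == 0 {
            out.push(value);
            value = 0;
            shift = 0;
        } else {
            shift += 7;
        }
    }
    out
}
```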


@ -1,6 +1,6 @@
[package]
name = "schemsearch-java"
version = "0.1.0"
version = "0.1.3"
edition = "2021"
license = "AGPL-3.0-or-later"


@ -21,7 +21,7 @@ use jni::JNIEnv;
use jni::objects::{JClass, JString};
use jni::sys::jstring;
use schemsearch_files::Schematic;
use schemsearch_files::SpongeV2Schematic;
use schemsearch_lib::{search, SearchBehavior};
#[no_mangle]
@ -32,8 +32,8 @@ pub extern "system" fn Java_SchemSearch_search<'local>(mut env: JNIEnv<'local>,
pattern_path: JString<'local>) -> jstring {
let schematic_path: String = env.get_string(&schematic_path).expect("Couldn't get java string!").into();
let pattern_path: String = env.get_string(&pattern_path).expect("Couldn't get java string!").into();
let schematic = Schematic::load(&PathBuf::from(&schematic_path)).unwrap();
let pattern = Schematic::load(&PathBuf::from(&pattern_path)).unwrap();
let schematic = SpongeV2Schematic::load(&PathBuf::from(&schematic_path)).unwrap();
let pattern = SpongeV2Schematic::load(&PathBuf::from(&pattern_path)).unwrap();
let matches = search(schematic, &pattern, SearchBehavior {
ignore_block_data: true,
@ -45,8 +45,8 @@ pub extern "system" fn Java_SchemSearch_search<'local>(mut env: JNIEnv<'local>,
});
let mut result = String::new();
for (x, y, z, p) in matches {
result.push_str(&format!("{}, {}, {}, {};", x, y, z, p));
for m in matches {
result.push_str(&format!("{}, {}, {}, {};", m.x, m.y, m.z, m.percent));
}
result.remove(result.len() - 1);
let output = env.new_string(result).expect("Couldn't create java string!");


@ -1,12 +1,12 @@
[package]
name = "schemsearch-lib"
version = "0.1.0"
version = "0.1.3"
edition = "2021"
license = "AGPL-3.0-or-later"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
hematite-nbt = "0.5.2"
serde = "1.0.152"
serde = { version = "1.0.160", features = ["derive"] }
schemsearch-files = { path = "../schemsearch-files" }
named-binary-tag = "0.6"


@ -17,11 +17,12 @@
pub mod pattern_mapper;
use serde::{Serialize, Deserialize};
use pattern_mapper::match_palette;
use schemsearch_files::Schematic;
use schemsearch_files::SpongeSchematic;
use crate::pattern_mapper::match_palette_adapt;
#[derive(Debug, Clone, Copy)]
#[derive(Debug, Clone, Copy, Deserialize, Serialize)]
pub struct SearchBehavior {
pub ignore_block_data: bool,
pub ignore_block_entities: bool,
@ -32,10 +33,10 @@ pub struct SearchBehavior {
}
pub fn search(
schem: Schematic,
pattern_schem: &Schematic,
schem: SpongeSchematic,
pattern_schem: &SpongeSchematic,
search_behavior: SearchBehavior,
) -> Vec<(u16, u16, u16, f32)> {
) -> Vec<Match> {
if schem.width < pattern_schem.width || schem.height < pattern_schem.height || schem.length < pattern_schem.length {
return vec![];
}
@ -46,21 +47,21 @@ pub fn search(
let pattern_schem = match_palette(&schem, &pattern_schem, search_behavior.ignore_block_data);
let mut matches: Vec<(u16, u16, u16, f32)> = Vec::new();
let mut matches: Vec<Match> = Vec::new();
let pattern_data = pattern_schem.block_data.as_slice();
let schem_data = if search_behavior.ignore_block_data {
match_palette_adapt(&schem, &pattern_schem.palette, search_behavior.ignore_block_data)
} else {
schem.block_data
schem.block_data.clone()
};
let schem_data = schem_data.as_slice();
let air_id = if search_behavior.ignore_air || search_behavior.air_as_any { pattern_schem.palette.get("minecraft:air").unwrap_or(&-1) } else { &-1};
let pattern_blocks = (pattern_schem.width * pattern_schem.height * pattern_schem.length) as f32;
let pattern_blocks = pattern_data.len() as f32;
let pattern_width = pattern_schem.width as usize;
let pattern_height = pattern_schem.height as usize;
@ -89,7 +90,12 @@ pub fn search(
}
let matching_percent = matching as f32 / pattern_blocks;
if matching_percent >= search_behavior.threshold {
matches.push((x as u16, y as u16, z as u16, matching_percent));
matches.push(Match {
x: x as u16,
y: y as u16,
z: z as u16,
percent: matching_percent,
});
}
}
}
@ -98,6 +104,25 @@ pub fn search(
return matches;
}
#[derive(Debug, Clone, Copy, Deserialize, Serialize)]
pub struct Match {
pub x: u16,
pub y: u16,
pub z: u16,
pub percent: f32,
}
impl Default for Match {
fn default() -> Self {
Self {
x: 0,
y: 0,
z: 0,
percent: 0.0,
}
}
}
#[inline]
pub fn normalize_data(data: &str, ignore_data: bool) -> &str {
if ignore_data {
@ -107,67 +132,69 @@ pub fn normalize_data(data: &str, ignore_data: bool) -> &str {
}
}
pub fn parse_schematic(data: &Vec<u8>) -> Schematic {
if data[0] == 0x1f && data[1] == 0x8b {
// gzip
nbt::from_gzip_reader(data.as_slice()).unwrap()
} else {
// uncompressed
nbt::from_reader(data.as_slice()).unwrap()
}
}
#[allow(unused_imports)]
#[cfg(test)]
mod tests {
use std::path::{Path, PathBuf};
use schemsearch_files::Schematic;
use schemsearch_files::SchematicVersioned::V2;
use schemsearch_files::SpongeV2Schematic;
use crate::pattern_mapper::strip_data;
use super::*;
#[test]
fn read_schematic() {
let schematic = Schematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
let schematic = SchematicVersioned::load(&PathBuf::from("../tests/simple.schem")).unwrap();
let schematic = match schematic {
V2 (schematic) => schematic,
_ => panic!("Invalid schematic version"),
};
assert_eq!(schematic.width as usize * schematic.height as usize * schematic.length as usize, schematic.block_data.len());
assert_eq!(schematic.palette_max, schematic.palette.len() as i32);
}
#[test]
fn test_parse_function() {
let file = std::fs::File::open("../tests/simple.schem").expect("Failed to open file");
let schematic: Schematic = parse_schematic(&std::io::Read::bytes(file).map(|b| b.unwrap()).collect());
let schematic: SchematicVersioned = SchematicVersioned::load(&PathBuf::from("../tests/simple.schem")).unwrap();
let schematic = match schematic {
V2 (schematic) => schematic,
_ => panic!("Invalid schematic version"),
};
assert_eq!(schematic.width as usize * schematic.height as usize * schematic.length as usize, schematic.block_data.len());
assert_eq!(schematic.palette_max, schematic.palette.len() as i32);
}
#[test]
fn test_strip_schem() {
let schematic = Schematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
let schematic = SchematicVersioned::load(&PathBuf::from("../tests/simple.schem")).unwrap();
let stripped = strip_data(&schematic);
assert_eq!(stripped.palette.keys().any(|k| k.contains('[')), false);
assert_eq!(stripped.get_palette().keys().any(|k| k.contains('[')), false);
}
#[test]
fn test_match_palette() {
let schematic = Schematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
let endstone = Schematic::load(&PathBuf::from("../tests/endstone.schem")).unwrap();
let schematic = SchematicVersioned::load(&PathBuf::from("../tests/simple.schem")).unwrap();
let endstone = SchematicVersioned::load(&PathBuf::from("../tests/endstone.schem")).unwrap();
let _ = match_palette(&schematic, &endstone, true);
}
#[test]
fn test_match_palette_ignore_data() {
let schematic = Schematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
let endstone = Schematic::load(&PathBuf::from("../tests/endstone.schem")).unwrap();
let schematic = SchematicVersioned::load(&PathBuf::from("../tests/simple.schem")).unwrap();
let endstone = SchematicVersioned::load(&PathBuf::from("../tests/endstone.schem")).unwrap();
let _ = match_palette(&schematic, &endstone, false);
}
#[test]
pub fn test_big_search() {
let schematic = Schematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
let endstone = Schematic::load(&PathBuf::from("../tests/endstone.schem")).unwrap();
let schematic = SchematicVersioned::load(&PathBuf::from("../tests/simple.schem")).unwrap();
let endstone = SchematicVersioned::load(&PathBuf::from("../tests/endstone.schem")).unwrap();
let _ = search(schematic, &endstone, SearchBehavior {
ignore_block_data: true,
@ -181,8 +208,8 @@ mod tests {
#[test]
pub fn test_search() {
let schematic = Schematic::load(&PathBuf::from("../tests/Random.schem")).unwrap();
let pattern = Schematic::load(&PathBuf::from("../tests/Pattern.schem")).unwrap();
let schematic = SchematicVersioned::load(&PathBuf::from("../tests/Random.schem")).unwrap();
let pattern = SchematicVersioned::load(&PathBuf::from("../tests/Pattern.schem")).unwrap();
let matches = search(schematic, &pattern, SearchBehavior {
ignore_block_data: true,
@ -195,13 +222,16 @@ mod tests {
println!("{:?}", matches);
assert_eq!(matches.len(), 1);
assert_eq!(matches[0], (1, 0, 3, 1.0));
assert_eq!(matches[0].x, 1);
assert_eq!(matches[0].y, 0);
assert_eq!(matches[0].z, 3);
assert_eq!(matches[0].percent, 1.0);
}
#[test]
pub fn test_search_ws() {
let schematic = Schematic::load(&PathBuf::from("../tests/warships/GreyFly-by-Bosslar.schem")).unwrap();
let pattern = Schematic::load(&PathBuf::from("../tests/gray_castle_complex.schem")).unwrap();
let schematic = SchematicVersioned::load(&PathBuf::from("../tests/warships/GreyFly-by-Bosslar.schem")).unwrap();
let pattern = SchematicVersioned::load(&PathBuf::from("../tests/gray_castle_complex.schem")).unwrap();
let matches = search(schematic, &pattern, SearchBehavior {
ignore_block_data: false,
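
A usage sketch of the updated search API, assuming the SpongeSchematic, SearchBehavior, and Match shapes shown in this diff; the full SearchBehavior field list is not visible here, so it is passed in rather than constructed:

```rust
use std::path::PathBuf;
use schemsearch_files::SpongeSchematic;
use schemsearch_lib::{search, Match, SearchBehavior};

// Sketch: load a schematic and a pattern, run the search, and report matches.
fn find_pattern(schem: &PathBuf, pattern: &PathBuf, behavior: SearchBehavior) -> Result<Vec<Match>, String> {
    let schematic = SpongeSchematic::load(schem)?;
    let pattern = SpongeSchematic::load(pattern)?;
    let matches = search(schematic, &pattern, behavior);
    for m in &matches {
        println!("match at ({}, {}, {}), {:.1}% of pattern blocks", m.x, m.y, m.z, m.percent * 100.0);
    }
    Ok(matches)
}
```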


@ -15,11 +15,12 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use nbt::Map;
use schemsearch_files::Schematic;
use std::collections::HashMap;
use nbt::CompoundTag;
use schemsearch_files::SpongeSchematic;
use crate::normalize_data;
fn create_reverse_palette(schem: &Schematic) -> Vec<&str> {
fn create_reverse_palette(schem: &SpongeSchematic) -> Vec<&str> {
let mut reverse_palette = Vec::with_capacity(schem.palette_max as usize);
(0..schem.palette_max).for_each(|_| reverse_palette.push(""));
for (key, value) in schem.palette.iter() {
@ -28,10 +29,10 @@ fn create_reverse_palette(schem: &Schematic) -> Vec<&str> {
reverse_palette
}
pub fn strip_data(schem: &Schematic) -> Schematic {
pub fn strip_data(schem: &SpongeSchematic) -> SpongeSchematic {
let mut data: Vec<i32> = Vec::new();
let mut palette: Map<String, i32> = Map::new();
let mut palette: HashMap<String, i32> = HashMap::new();
let mut palette_max: i32 = 0;
let reverse_palette = create_reverse_palette(schem);
@ -47,9 +48,8 @@ pub fn strip_data(schem: &Schematic) -> Schematic {
data.push(*entry);
}
Schematic {
version: schem.version,
data_version: schem.data_version,
SpongeSchematic {
data_version: 1,
palette,
palette_max,
block_data: data,
@ -57,17 +57,19 @@ pub fn strip_data(schem: &Schematic) -> Schematic {
height: schem.height,
length: schem.length,
width: schem.width,
metadata: schem.metadata.clone(),
offset: schem.offset.clone(),
metadata: CompoundTag::new(),
offset: [0; 3],
entities: None,
}
}
pub fn match_palette_adapt(schem: &Schematic, matching_palette: &Map<String, i32>, ignore_data: bool) -> Vec<i32> {
pub fn match_palette_adapt(schem: &SpongeSchematic, matching_palette: &HashMap<String, i32>, ignore_data: bool) -> Vec<i32> {
let mut data: Vec<i32> = Vec::new();
let reverse_palette = create_reverse_palette(schem);
for x in &schem.block_data {
for x in schem.block_data.iter() {
let blockname = reverse_palette[*x as usize];
let blockname = if ignore_data { normalize_data(blockname, ignore_data) } else { blockname };
let block_id = match matching_palette.get(&*blockname) {
@ -81,10 +83,10 @@ pub fn match_palette_adapt(schem: &Schematic, matching_palette: &Map<String, i32
}
pub fn match_palette(
schem: &Schematic,
pattern: &Schematic,
schem: &SpongeSchematic,
pattern: &SpongeSchematic,
ignore_data: bool,
) -> Schematic {
) -> SpongeSchematic {
if ignore_data {
match_palette_internal(&strip_data(schem), &strip_data(pattern), ignore_data)
} else {
@ -93,24 +95,23 @@ pub fn match_palette(
}
fn match_palette_internal(
schem: &Schematic,
pattern: &Schematic,
schem: &SpongeSchematic,
pattern: &SpongeSchematic,
ignore_data: bool,
) -> Schematic {
) -> SpongeSchematic {
let data_pattern: Vec<i32> = match_palette_adapt(&pattern, &schem.palette, ignore_data);
Schematic {
version: pattern.version.clone(),
data_version: pattern.data_version.clone(),
SpongeSchematic {
data_version: 0,
palette: schem.palette.clone(),
palette_max: schem.palette_max,
block_data: data_pattern,
block_entities: pattern.block_entities.clone(),
height: pattern.height.clone(),
length: pattern.length.clone(),
width: pattern.width.clone(),
metadata: pattern.metadata.clone(),
offset: pattern.offset.clone(),
height: pattern.height,
length: pattern.length,
width: pattern.width,
metadata: CompoundTag::new(),
offset: [0; 3],
entities: None,
}
}


@ -1,6 +1,6 @@
[package]
name = "schemsearch-sql"
version = "0.1.0"
version = "0.1.3"
edition = "2021"
license = "AGPL-3.0-or-later"


@ -16,7 +16,7 @@
*/
use std::sync::Mutex;
use sqlx::{ConnectOptions, Executor, MySql, MySqlPool, Pool, Row};
use sqlx::{Executor, MySql, Pool, Row};
use sqlx::mysql::{MySqlConnectOptions, MySqlPoolOptions};
use crate::filter::SchematicFilter;