mirror of https://github.com/Chaoscaot/schemsearch.git
synced 2025-11-05 05:54:02 +01:00

Compare commits

33 Commits
| SHA1 |
|---|
| 2a584e878f |
| 33f5fe03fe |
| 0e6f2c3f78 |
| 82108d9e36 |
| d20940f89b |
| e3e6e9f759 |
| ccae2ba393 |
| 6c6c95bedd |
| 582079c80d |
| e25aeab065 |
| aee3a80267 |
| 5107e04497 |
| a357da2ce8 |
| eb84adb4a3 |
| 25c4e97f71 |
| 7d9e7f28a9 |
| ef1f8ed38f |
| 4671f38591 |
| 5cff84538d |
| 9a0b0535c6 |
| a47c2f44bd |
| 246927d840 |
| d1a01dc0c1 |
| e03a805bdb |
| 9cca860db3 |
| 80f5191ae8 |
| 3f20cbc17f |
| 733aaa9e72 |
| 14866df17d |
| 00e3d6fd0f |
| fb8f935617 |
| 2a112ac49c |
| a1b5449f06 |
.github/dependabot.yml (vendored, new file, 6 lines)

```yaml
version: 2
updates:
  - package-ecosystem: "cargo"
    directory: "/"
    schedule:
      interval: "weekly"
```
.github/workflows/master-build.yml (vendored, 24 changed lines)

```diff
@@ -19,32 +19,36 @@ jobs:
      - name: Cache Cargo modules
        id: cache-cargo
        uses: actions/cache@v3
        env:
          cache-name: cache-cargo-target-debug
        continue-on-error: false
        with:
          path: target
          key: ${{ runner.os }}-${{ env.cache-name }}-${{ hashFiles('**/Cargo.lock') }}
          path: |
            ~/.cargo/registry
            ~/.cargo/git
          key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
          restore-keys: |
            ${{ runner.os }}-${{ env.cache-name }}-
            ${{ runner.os }}-cargo-
      - name: Build
        run: make debug
      - name: Run tests
        run: cargo test --verbose -p schemsearch-lib

  build-release:
    needs:
      - build
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Cache Cargo modules
        id: cache-cargo
        uses: actions/cache@v3
        env:
          cache-name: cache-cargo-target-release
        continue-on-error: false
        with:
          path: target
          key: ${{ runner.os }}-${{ env.cache-name }}-${{ hashFiles('**/Cargo.lock') }}
          path: |
            ~/.cargo/registry
            ~/.cargo/git
          key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
          restore-keys: |
            ${{ runner.os }}-${{ env.cache-name }}-
            ${{ runner.os }}-cargo-
      - name: Build
        run: make
      - name: Upload a Build Artifact
```
.github/workflows/release-build.yml (vendored, 11 changed lines)

```diff
@@ -22,13 +22,14 @@ jobs:
      - name: Cache Cargo modules
        id: cache-cargo
        uses: actions/cache@v3
        env:
          cache-name: cache-cargo-target-release
        continue-on-error: false
        with:
          path: target
          key: ${{ runner.os }}-${{ env.cache-name }}-${{ hashFiles('**/Cargo.lock') }}
          path: |
            ~/.cargo/registry
            ~/.cargo/git
          key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
          restore-keys: |
            ${{ runner.os }}-${{ env.cache-name }}-
            ${{ runner.os }}-cargo-
      - name: Build
        run: make
      - name: Create Tarball
```
CODE_OF_CONDUCT.md (new file, 128 lines)

# Contributor Covenant Code of Conduct

## Our Pledge

We as members, contributors, and leaders pledge to make participation in our community a harassment-free experience for everyone, regardless of age, body size, visible or invisible disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation.

We pledge to act and interact in ways that contribute to an open, welcoming, diverse, inclusive, and healthy community.

## Our Standards

Examples of behavior that contributes to a positive environment for our community include:

* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes, and learning from the experience
* Focusing on what is best not just for us as individuals, but for the overall community

Examples of unacceptable behavior include:

* The use of sexualized language or imagery, and sexual attention or advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a professional setting

## Enforcement Responsibilities

Community leaders are responsible for clarifying and enforcing our standards of acceptable behavior and will take appropriate and fair corrective action in response to any behavior that they deem inappropriate, threatening, offensive, or harmful.

Community leaders have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, and will communicate reasons for moderation decisions when appropriate.

## Scope

This Code of Conduct applies within all community spaces, and also applies when an individual is officially representing the community in public spaces. Examples of representing our community include using an official e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event.

## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be reported to the community leaders responsible for enforcement at chaoscaot@zohomail.eu. All complaints will be reviewed and investigated promptly and fairly.

All community leaders are obligated to respect the privacy and security of the reporter of any incident.

## Enforcement Guidelines

Community leaders will follow these Community Impact Guidelines in determining the consequences for any action they deem in violation of this Code of Conduct:

### 1. Correction

**Community Impact**: Use of inappropriate language or other behavior deemed unprofessional or unwelcome in the community.

**Consequence**: A private, written warning from community leaders, providing clarity around the nature of the violation and an explanation of why the behavior was inappropriate. A public apology may be requested.

### 2. Warning

**Community Impact**: A violation through a single incident or series of actions.

**Consequence**: A warning with consequences for continued behavior. No interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, for a specified period of time. This includes avoiding interactions in community spaces as well as external channels like social media. Violating these terms may lead to a temporary or permanent ban.

### 3. Temporary Ban

**Community Impact**: A serious violation of community standards, including sustained inappropriate behavior.

**Consequence**: A temporary ban from any sort of interaction or public communication with the community for a specified period of time. No public or private interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, is allowed during this period. Violating these terms may lead to a permanent ban.

### 4. Permanent Ban

**Community Impact**: Demonstrating a pattern of violation of community standards, including sustained inappropriate behavior, harassment of an individual, or aggression toward or disparagement of classes of individuals.

**Consequence**: A permanent ban from any sort of public interaction within the community.

## Attribution

This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 2.0, available at https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.

Community Impact Guidelines were inspired by [Mozilla's code of conduct enforcement ladder](https://github.com/mozilla/diversity).

[homepage]: https://www.contributor-covenant.org

For answers to common questions about this code of conduct, see the FAQ at https://www.contributor-covenant.org/faq. Translations are available at https://www.contributor-covenant.org/translations.
Cargo.toml (Normal file → Executable file, 1 changed line)

```diff
@@ -7,6 +7,7 @@ members = [
    "schemsearch-sql",
    "schemsearch-java"
]
resolver = "2"

[profile.small]
inherits = "release"
```
```diff
@@ -1,5 +1,5 @@
# schemsearch
### A *simple* CLI tool to search in Sponge V2 Schematic files
### A *simple* CLI tool to search in Sponge Schematic files

---

@@ -43,7 +43,6 @@ schemsearch-cli --help
## Roadmap
A list of features that are planned to be implemented in the future. In order of priority.

- [ ] Full JSON output (Progressbar)
- [ ] Use AVX2 for faster search
- [ ] Tile entities data search
- [ ] Entities search
```
schemsearch-cli/Cargo.toml (Normal file → Executable file, 4 changed lines)

```diff
@@ -1,6 +1,6 @@
[package]
name = "schemsearch-cli"
version = "0.1.1"
version = "0.1.7"
edition = "2021"
license = "AGPL-3.0-or-later"

@@ -13,7 +13,7 @@ schemsearch-sql = { path = "../schemsearch-sql", optional = true }

clap = { version = "4.1.8", features = ["cargo"] }
futures = { version = "0.3", optional = true }
sqlx = { version = "0.6", features = [ "runtime-async-std-native-tls" , "mysql" ], optional = true }
sqlx = { version = "0.7", features = [ "runtime-async-std-native-tls" , "mysql" ], optional = true }
rayon = "1.7.0"
indicatif = { version = "0.17.3", features = ["rayon"] }
serde = "1.0.157"
```
schemsearch-cli/src/main.rs (Normal file → Executable file, 114 changed lines)

```diff
@@ -26,8 +26,8 @@ use clap::{command, Arg, ArgAction, ValueHint};
use std::path::PathBuf;
use std::str::FromStr;
use clap::error::ErrorKind;
use schemsearch_lib::{Match, search, SearchBehavior};
use crate::types::{PathSchematicSupplier, SchematicSupplierType};
use schemsearch_lib::{Match, SearchBehavior};
use crate::types::{PathSchematicSupplier, SchematicSupplier, SchematicSupplierType};
#[cfg(feature = "sql")]
use futures::executor::block_on;
use rayon::prelude::*;
@@ -39,18 +39,20 @@ use schemsearch_sql::load_all_schematics;
#[cfg(feature = "sql")]
use crate::types::SqlSchematicSupplier;
use indicatif::*;
use schemsearch_files::Schematic;
use schemsearch_files::SpongeSchematic;
use crate::sinks::{OutputFormat, OutputSink};
use crate::stderr::MaschineStdErr;
use schemsearch_lib::nbt_search::has_invalid_nbt;
use schemsearch_lib::search::search;

fn main() {
    #[allow(unused_mut)]
    let mut cmd = command!("schemsearch")
    let mut cmd = command!("schemsearch")
        .arg(
            Arg::new("pattern")
                .help("The pattern to search for")
                .required(true)
                .value_hint(ValueHint::FilePath)
                .required_unless_present("invalid-nbt")
                .action(ArgAction::Set),
        )
        .arg(
@@ -94,6 +96,13 @@ fn main() {
                .long("air-as-any")
                .action(ArgAction::SetTrue),
        )
        .arg(
            Arg::new("invalid-nbt")
                .help("Search for Schematics with Invalid or missing NBT data")
                .short('I')
                .long("invalid-nbt")
                .action(ArgAction::SetTrue),
        )
        .arg(
            Arg::new("output")
                .help("The output format and path [Format:Path] available formats: text, json, csv; available paths: std, err, (file path)")
@@ -134,7 +143,7 @@
        )
        .arg(
            Arg::new("threads")
                .help("The number of threads to use [0 = Available Threads]")
                .help("The number of threads to use [0 = all Available Threads]")
                .short('T')
                .long("threads")
                .action(ArgAction::Set)
@@ -150,13 +159,22 @@
                .default_value("0")
                .value_parser(|s: &str| s.parse::<u16>().map_err(|e| e.to_string()))
        )
        .arg(
            Arg::new("limit")
                .help("The maximum number of matches to return [0 = Unlimited]")
                .short('l')
                .long("limit")
                .action(ArgAction::Set)
                .default_value("50")
                .value_parser(|s: &str| s.parse::<usize>().map_err(|e| e.to_string())),
        )
        .about("Searches for a pattern in a schematic")
        .bin_name("schemsearch");

    #[cfg(feature = "sql")]
    let mut cmd = cmd
    let mut cmd = cmd
        .arg(
            Arg::new("sql")
            Arg::new("sql")
                .help("Use the SteamWar SQL Database")
                .short('s')
                .long("sql")
@@ -195,18 +213,22 @@ fn main() {
        air_as_any: matches.get_flag("air-as-any"),
        ignore_entities: matches.get_flag("ignore-entities"),
        threshold: *matches.get_one::<f32>("threshold").expect("Couldn't get threshold"),
        invalid_nbt: matches.get_flag("invalid-nbt"),
    };

    let pattern = match Schematic::load(&PathBuf::from(matches.get_one::<String>("pattern").unwrap())) {
        Ok(x) => x,
        Err(e) => {
            cmd.error(ErrorKind::Io, format!("Error while loading Pattern: {}", e.to_string())).exit();
        }
    let pattern = match matches.get_one::<String>("pattern") {
        Some(p) => match SpongeSchematic::load(&PathBuf::from(p)) {
            Ok(x) => Some(x),
            Err(e) => {
                cmd.error(ErrorKind::Io, format!("Error while loading Pattern: {}", e.to_string())).exit();
            }
        },
        None => None,
    };

    let mut schematics: Vec<SchematicSupplierType> = Vec::new();
    match matches.get_many::<String>("schematic") {
        None => {},
        None => {}
        Some(x) => {
            let paths = x.map(|x| PathBuf::from(x));
            for path in paths {
@@ -217,12 +239,12 @@ fn main() {
                    .filter(|x| x.path().is_file())
                    .filter(|x| x.path().extension().unwrap().to_str().unwrap() == "schem")
                    .for_each(|x| {
                        schematics.push(SchematicSupplierType::PATH(Box::new(PathSchematicSupplier {
                        schematics.push(SchematicSupplierType::PATH(PathSchematicSupplier {
                            path: x.path(),
                        })))
                        }))
                    });
                } else if path.extension().unwrap().to_str().unwrap() == "schem" {
                    schematics.push(SchematicSupplierType::PATH(Box::new(PathSchematicSupplier { path })));
                    schematics.push(SchematicSupplierType::PATH(PathSchematicSupplier { path }));
                }
            }
        }
@@ -238,7 +260,7 @@ fn main() {
            filter = filter.name(x.collect());
        }
        for schem in block_on(load_all_schematics(filter)) {
            schematics.push(SchematicSupplierType::SQL(SqlSchematicSupplier{
            schematics.push(SchematicSupplierType::SQL(SqlSchematicSupplier {
                node: schem
            }))
        };
@@ -264,6 +286,8 @@ fn main() {
        bar.set_draw_target(ProgressDrawTarget::term_like(Box::new(MaschineStdErr { size: term_size })))
    }

    let max_matching = *matches.get_one::<usize>("limit").expect("Could not get max-matching");

    let matches: Vec<SearchResult> = schematics.par_iter().progress_with(bar).map(|schem| {
        match schem {
            SchematicSupplierType::PATH(schem) => {
@@ -271,28 +295,20 @@ fn main() {
                    Some(x) => x,
                    None => return SearchResult {
                        name: schem.get_name(),
                        matches: Vec::default()
                        matches: Vec::default(),
                    }
                };
                SearchResult {
                    name: schem.get_name(),
                    matches: search(schematic, &pattern, search_behavior)
                }
                search_in_schem(schematic, pattern.as_ref(), search_behavior, schem)
            }
            #[cfg(feature = "sql")]
            SchematicSupplierType::SQL(schem) => {
                match schem.get_schematic() {
                    Ok(schematic) => {
                        SearchResult {
                            name: schem.get_name(),
                            matches: search(schematic, &pattern, search_behavior)
                        }
                    }
                    Ok(schematic) => search_in_schem(schematic, pattern.as_ref(), search_behavior, schem),
                    Err(e) => {
                        eprintln!("Error while loading schematic ({}): {}", schem.get_name(), e.to_string());
                        SearchResult {
                            name: schem.get_name(),
                            matches: Vec::default()
                            matches: Vec::default(),
                        }
                    }
                }
@@ -300,13 +316,19 @@ fn main() {
        }
    }).collect();

    for matching in matches {
    let mut matches_count = 0;

    'outer: for matching in matches {
        let schem_name = matching.name;
        let matching = matching.matches;
        for x in matching {
            for out in &mut output {
                write!(out.1, "{}", out.0.found_match(&schem_name, x)).unwrap();
            }
            matches_count += 1;
            if max_matching != 0 && matches_count >= max_matching {
                break 'outer;
            }
        }
    }

@@ -317,8 +339,34 @@ fn main() {
    }
}

fn load_schem(schem_path: &PathBuf) -> Option<Schematic> {
    match Schematic::load(schem_path) {
fn search_in_schem(schematic: SpongeSchematic, pattern: Option<&SpongeSchematic>, search_behavior: SearchBehavior, schem: &impl SchematicSupplier) -> SearchResult {
    if search_behavior.invalid_nbt {
        if has_invalid_nbt(schematic) {
            SearchResult {
                name: schem.get_name(),
                matches: vec![Match {
                    x: 0,
                    y: 0,
                    z: 0,
                    percent: 1.0,
                }],
            }
        } else {
            SearchResult {
                name: schem.get_name(),
                matches: vec![],
            }
        }
    } else {
        SearchResult {
            name: schem.get_name(),
            matches: search(schematic, pattern.unwrap(), search_behavior),
        }
    }
}

fn load_schem(schem_path: &PathBuf) -> Option<SpongeSchematic> {
    match SpongeSchematic::load(schem_path) {
        Ok(x) => Some(x),
        Err(e) => {
            println!("Error while loading schematic ({}): {}", schem_path.to_str().unwrap(), e.to_string());
```
schemsearch-cli/src/sinks.rs (Normal file → Executable file, 2 changed lines)

```diff
@@ -71,7 +71,7 @@ impl OutputFormat {
    pub fn start(&self, total: u32, search_behavior: &SearchBehavior, start_time: u128) -> String {
        match self {
            OutputFormat::Text => format!("Starting search in {} schematics\n", total),
            OutputFormat::CSV => format!("Name,X,Y,Z,Percent\n"),
            OutputFormat::CSV => "Name,X,Y,Z,Percent\n".to_owned(),
            OutputFormat::JSON => format!("{}\n", serde_json::to_string(&JsonEvent::Init(InitEvent {
                total,
                search_behavior: search_behavior.clone(),
```
schemsearch-cli/src/types.rs (Normal file → Executable file, 29 changed lines)

```diff
@@ -15,26 +15,32 @@
 * along with this program. If not, see <https://www.gnu.org/licenses/>.
 */

#[cfg(feature = "sql")]
use std::io::Cursor;
use std::path::PathBuf;
#[cfg(feature = "sql")]
use futures::executor::block_on;
#[allow(unused_imports)]
use schemsearch_files::Schematic;
#[cfg(feature = "sql")]
use schemsearch_files::SpongeSchematic;
#[cfg(feature = "sql")]
use schemsearch_sql::{load_schemdata, SchematicNode};

pub enum SchematicSupplierType {
    PATH(Box<PathSchematicSupplier>),
    PATH(PathSchematicSupplier),
    #[cfg(feature = "sql")]
    SQL(SqlSchematicSupplier),
}

pub trait SchematicSupplier {
    fn get_name(&self) -> String;
}

pub struct PathSchematicSupplier {
    pub path: PathBuf,
}

impl PathSchematicSupplier {
    pub fn get_name(&self) -> String {
impl SchematicSupplier for PathSchematicSupplier {
    fn get_name(&self) -> String {
        self.path.file_stem().unwrap().to_str().unwrap().to_string()
    }
}
@@ -46,12 +52,17 @@ pub struct SqlSchematicSupplier {

#[cfg(feature = "sql")]
impl SqlSchematicSupplier {
    pub fn get_schematic(&self) -> Result<Schematic, String> {
        let schemdata = block_on(load_schemdata(self.node.id));
        Schematic::load_data(schemdata.as_slice())
    pub fn get_schematic(&self) -> Result<SpongeSchematic, String> {
        let mut schemdata = block_on(load_schemdata(self.node.id));
        SpongeSchematic::load_data(&mut Cursor::new(schemdata.as_mut_slice()))
    }
}

    pub fn get_name(&self) -> String {
#[cfg(feature = "sql")]
impl SchematicSupplier for SqlSchematicSupplier {
    fn get_name(&self) -> String {
        format!("{} ({})", self.node.name, self.node.id)
    }
}
```
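The `SchematicSupplier` trait introduced above only abstracts the display name; loading stays on the concrete supplier types. A minimal sketch of what a further implementor could look like, assuming only the trait as shown in this diff (the in-memory supplier itself is hypothetical, not part of the change):

```rust
use schemsearch_files::SpongeSchematic;

// The trait as introduced in this diff:
pub trait SchematicSupplier {
    fn get_name(&self) -> String;
}

// Hypothetical third supplier, for illustration only: anything that can name a
// schematic can sit next to the PATH and SQL suppliers.
pub struct InMemorySchematicSupplier {
    pub name: String,
    pub schematic: SpongeSchematic,
}

impl SchematicSupplier for InMemorySchematicSupplier {
    fn get_name(&self) -> String {
        self.name.clone()
    }
}
```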
```diff
@@ -1,6 +1,6 @@
[package]
name = "schemsearch_faster"
version = "0.1.1"
version = "0.1.3"
edition = "2021"
license = "AGPL-3.0-or-later"
```
```diff
@@ -16,9 +16,9 @@
 */

use nbt::Map;
use schemsearch_files::Schematic;
use schemsearch_files::SpongeV2Schematic;

pub fn convert_to_search_space(schem: &Schematic, palette: &Vec<String>) -> Vec<Vec<u8>> {
pub fn convert_to_search_space(schem: &SpongeV2Schematic, palette: &Vec<String>) -> Vec<Vec<u8>> {
    let mut data: Vec<Vec<u8>> = Vec::with_capacity(palette.len());
    let block_data = &schem.block_data;
    for name in palette {
@@ -48,26 +48,26 @@ pub fn unwrap_palette(palette: &Map<String, i32>) -> Vec<String> {
#[cfg(test)]
mod tests {
    use std::path::{Path, PathBuf};
    use schemsearch_files::Schematic;
    use schemsearch_files::SpongeV2Schematic;
    use crate::{convert_to_search_space, unwrap_palette};

    //#[test]
    pub fn test() {
        let schematic = Schematic::load(&PathBuf::from("../tests/Pattern.schem")).unwrap();
        let schematic = SpongeV2Schematic::load(&PathBuf::from("../tests/Pattern.schem")).unwrap();
        dbg!(convert_to_search_space(&schematic, &unwrap_palette(&schematic.palette)));
    }

    //#[test]
    pub fn test_2() {
        let schematic = Schematic::load(&PathBuf::from("../tests/Pattern.schem")).unwrap();
        let schematic2 = Schematic::load(&PathBuf::from("../tests/Random.schem")).unwrap();
        let schematic = SpongeV2Schematic::load(&PathBuf::from("../tests/Pattern.schem")).unwrap();
        let schematic2 = SpongeV2Schematic::load(&PathBuf::from("../tests/Random.schem")).unwrap();
        println!("{:?}", convert_to_search_space(&schematic2, &unwrap_palette(&schematic.palette)));
    }

    //#[test]
    pub fn test_big() {
        let schematic = Schematic::load(&PathBuf::from("../tests/endstone.schem")).unwrap();
        let schematic2 = Schematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
        let schematic = SpongeV2Schematic::load(&PathBuf::from("../tests/endstone.schem")).unwrap();
        let schematic2 = SpongeV2Schematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
        let _ = convert_to_search_space(&schematic2, &unwrap_palette(&schematic.palette));
    }
}
```
```diff
@@ -1,10 +1,11 @@
[package]
name = "schemsearch-files"
version = "0.1.1"
version = "0.1.5"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
hematite-nbt = "0.5.2"
serde = "1.0.152"
flate2 = "1.0.25"
named-binary-tag = "0.6"
```
```diff
@@ -15,81 +15,176 @@
 * along with this program. If not, see <https://www.gnu.org/licenses/>.
 */

use std::collections::hash_map::HashMap;
use std::io::Read;
use std::path::PathBuf;
use nbt::{Map, Value};
use serde::{Deserialize, Deserializer, Serialize};
use nbt::{CompoundTag, Tag};

#[derive(Serialize, Deserialize, Debug)]
pub struct Schematic {
    #[serde(rename = "Version")]
    pub version: i32,
    #[serde(rename = "DataVersion")]
#[derive(Clone, Debug)]
pub struct SpongeSchematic {
    pub data_version: i32,
    #[serde(rename = "Metadata")]
    pub metadata: Map<String, Value>,
    #[serde(rename = "Width")]
    pub metadata: CompoundTag,
    pub width: u16,
    #[serde(rename = "Height")]
    pub height: u16,
    #[serde(rename = "Length")]
    pub length: u16,
    #[serde(rename = "Offset")]
    pub offset: [i32; 3],
    #[serde(rename = "PaletteMax")]
    pub palette_max: i32,
    #[serde(rename = "Palette")]
    pub palette: Map<String, i32>,
    #[serde(rename = "BlockData", deserialize_with = "read_blockdata")]
    pub palette: HashMap<String, i32>,
    pub block_data: Vec<i32>,
    #[serde(rename = "BlockEntities")]
    pub block_entities: Vec<BlockEntity>,
    #[serde(rename = "Entities")]
    pub entities: Option<Vec<Entity>>,
}

fn read_blockdata<'de, D>(deserializer: D) -> Result<Vec<i32>, D::Error>
    where
        D: Deserializer<'de>,
{
    let s: Vec<i8> = Deserialize::deserialize(deserializer)?;
    Ok(read_varint_array(&s))
#[derive(Clone, Debug)]
pub struct BlockContainer {
    pub palette: HashMap<String, i32>,
    pub block_data: Vec<i32>,
    pub block_entities: Vec<BlockEntity>,
}

#[derive(Serialize, Deserialize, Debug, Clone)]
#[derive(Debug, Clone)]
pub struct BlockEntity {
    #[serde(rename = "Id")]
    pub id: String,
    #[serde(rename = "Pos")]
    pub pos: [i32; 3],
}

#[derive(Serialize, Deserialize, Debug, Clone)]
#[derive(Debug, Clone)]
pub struct BlockEntityV3 {
    pub id: String,
    pub pos: [i32; 3],
    pub data: HashMap<String, Tag>,
}

#[derive(Debug, Clone)]
pub struct Entity {
    #[serde(rename = "Id")]
    pub id: String,
    #[serde(rename = "Pos")]
    pub pos: [i32; 3],
}

impl Schematic {
    pub fn load_data<R>(data: R) -> Result<Schematic, String> where R: Read {
        let schematic: Schematic = match nbt::from_gzip_reader(data) {
            Ok(schem) => schem,
            Err(e) => return Err(format!("Failed to parse schematic: {}", e))
        };
        Ok(schematic)
impl SpongeSchematic {
    pub fn load_data<R>(data: &mut R) -> Result<SpongeSchematic, String> where R: Read {
        let nbt: CompoundTag = nbt::decode::read_gzip_compound_tag(data).map_err(|e| e.to_string())?;
        let version = nbt.get_i32("Version").unwrap_or_else(|_| {
            return if nbt.contains_key("Blocks") {
                3
            } else if nbt.contains_key("BlockEntities") {
                2
            } else if nbt.contains_key("TileEntities") {
                1
            } else {
                -1
            };
        });

        match version {
            1 => SpongeSchematic::from_nbt_1(nbt),
            2 => SpongeSchematic::from_nbt_2(nbt),
            3 => SpongeSchematic::from_nbt_3(nbt),
            _ => Err("Invalid schematic: Unknown Version".to_string()),
        }
    }

    pub fn load(path: &PathBuf) -> Result<Schematic, String> {
        let file = match std::fs::File::open(path) {
            Ok(x) => x,
            Err(_) => return Err(format!("Failed to open file: {}", path.to_str().unwrap()))
        };
        Schematic::load_data(file)
    pub fn load(path: &PathBuf) -> Result<SpongeSchematic, String> {
        let mut file = std::fs::File::open(path).map_err(|e| e.to_string())?;
        Self::load_data(&mut file)
    }

    pub fn from_nbt_1(nbt: CompoundTag) -> Result<Self, String> {
        Ok(Self {
            data_version: 0,
            metadata: nbt.get_compound_tag("Metadata").map_err(|e| e.to_string())?.clone(),
            width: nbt.get_i16("Width").map_err(|e| e.to_string())? as u16,
            height: nbt.get_i16("Height").map_err(|e| e.to_string())? as u16,
            length: nbt.get_i16("Length").map_err(|e| e.to_string())? as u16,
            offset: read_offset(nbt.get_i32_vec("Offset").map_err(|e| e.to_string())?)?,
            palette_max: nbt.get_i32("PaletteMax").map_err(|e| e.to_string())?,
            palette: read_palette(nbt.get_compound_tag("Palette").map_err(|e| e.to_string())?),
            block_data: read_blocks(nbt.get_i8_vec("BlockData").map_err(|e| e.to_string())?),
            block_entities: read_tile_entities(nbt.get_compound_tag_vec("TileEntities").unwrap_or_else(|_| vec![]))?,
            entities: None,
        })
    }

    pub fn from_nbt_2(nbt: CompoundTag) -> Result<Self, String> {
        Ok(Self{
            data_version: nbt.get_i32("DataVersion").map_err(|e| e.to_string())?,
            metadata: nbt.get_compound_tag("Metadata").map_err(|e| e.to_string())?.clone(),
            width: nbt.get_i16("Width").map_err(|e| e.to_string())? as u16,
            height: nbt.get_i16("Height").map_err(|e| e.to_string())? as u16,
            length: nbt.get_i16("Length").map_err(|e| e.to_string())? as u16,
            offset: read_offset(nbt.get_i32_vec("Offset").map_err(|e| e.to_string())?)?,
            palette_max: nbt.get_i32("PaletteMax").map_err(|e| e.to_string())?,
            palette: read_palette(nbt.get_compound_tag("Palette").map_err(|e| e.to_string())?),
            block_data: read_blocks(nbt.get_i8_vec("BlockData").map_err(|e| e.to_string())?),
            block_entities: read_tile_entities(nbt.get_compound_tag_vec("BlockEntities").unwrap_or_else(|_| vec![]))?,
            entities: None,
        })
    }

    pub fn from_nbt_3(nbt: CompoundTag) -> Result<Self, String> {
        let blocks = nbt.get_compound_tag("Blocks").map_err(|e| e.to_string())?;
        Ok(Self{
            data_version: nbt.get_i32("DataVersion").map_err(|e| e.to_string())?,
            metadata: nbt.get_compound_tag("Metadata").map_err(|e| e.to_string())?.clone(),
            width: nbt.get_i16("Width").map_err(|e| e.to_string())? as u16,
            height: nbt.get_i16("Height").map_err(|e| e.to_string())? as u16,
            length: nbt.get_i16("Length").map_err(|e| e.to_string())? as u16,
            offset: read_offset(nbt.get_i32_vec("Offset").map_err(|e| e.to_string())?)?,
            palette_max: compute_palette_max(blocks.get_compound_tag("Palette").map_err(|e| e.to_string())?),
            palette: read_palette(blocks.get_compound_tag("Palette").map_err(|e| e.to_string())?),
            block_data: read_blocks(blocks.get_i8_vec("BlockData").map_err(|e| e.to_string())?),
            block_entities: read_tile_entities(blocks.get_compound_tag_vec("BlockEntities").unwrap_or_else(|_| vec![]))?,
            entities: None,
        })
    }

}

fn read_tile_entities(tag: Vec<&CompoundTag>) -> Result<Vec<BlockEntity>, String> {
    let mut tile_entities = Vec::new();
    for t in tag {
        tile_entities.push(BlockEntity {
            id: t.get_str("Id").map_err(|e| e.to_string())?.to_string(),
            pos: read_offset(t.get("Pos").map_err(|e| e.to_string())?)?,
        });
    }
    Ok(tile_entities)
}

#[inline]
fn read_offset(offset: &Vec<i32>) -> Result<[i32; 3], String> {
    match offset.len() {
        3 => Ok([offset[0], offset[1], offset[2]]),
        _ => Err("Invalid schematic: read_offset wrong length".to_string()),
    }
}

#[inline]
fn read_palette(p: &CompoundTag) -> HashMap<String, i32> {
    let mut palette = HashMap::new();
    for (key, value) in p.iter() {
        match value {
            Tag::Int(n) => { palette.insert(key.clone(), *n); },
            _ => {},
        };
    }
    palette
}

#[inline]
fn compute_palette_max(palette: &CompoundTag) -> i32 {
    palette.iter().map(|(_, v)| v).filter_map(|v| match v {
        Tag::Int(n) => Some(*n),
        _ => None,
    }).max().unwrap_or(0)
}

#[inline]
fn read_blocks(blockdata: &Vec<i8>) -> Vec<i32> {
    read_varint_array(blockdata)
}

#[inline]
pub fn read_varint_array(read: &Vec<i8>) -> Vec<i32> {
    let mut data = Vec::new();
    let mut value: i32 = 0;
```
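The mirror cuts this hunk off inside `read_varint_array`. For orientation, a minimal sketch of the usual VarInt decoding used for Sponge-schematic `BlockData` (7 payload bits per byte, high bit as continuation flag); this illustrates the encoding and is not necessarily the exact body of the function above:

```rust
// Sketch of Sponge-style VarInt decoding; assumes well-formed input and omits
// the overflow checks a production decoder would add.
pub fn read_varint_array(read: &Vec<i8>) -> Vec<i32> {
    let mut data = Vec::new();
    let mut value: i32 = 0;
    let mut position = 0;

    for byte in read.iter().map(|b| *b as u8) {
        value |= ((byte & 0x7F) as i32) << position; // low 7 bits are payload
        if byte & 0x80 == 0 {
            // high bit clear: this byte ends the current VarInt
            data.push(value);
            value = 0;
            position = 0;
        } else {
            position += 7;
        }
    }
    data
}

#[cfg(test)]
mod varint_tests {
    use super::read_varint_array;

    #[test]
    fn decodes_single_and_multi_byte_values() {
        // 1 encodes as 0x01; 300 encodes as 0xAC 0x02.
        let bytes: Vec<i8> = vec![0x01, 0xACu8 as i8, 0x02];
        assert_eq!(read_varint_array(&bytes), vec![1, 300]);
    }
}
```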
```diff
@@ -1,6 +1,6 @@
[package]
name = "schemsearch-java"
version = "0.1.1"
version = "0.1.3"
edition = "2021"
license = "AGPL-3.0-or-later"
```
```diff
@@ -21,7 +21,7 @@ use jni::JNIEnv;
use jni::objects::{JClass, JString};

use jni::sys::jstring;
use schemsearch_files::Schematic;
use schemsearch_files::SpongeV2Schematic;
use schemsearch_lib::{search, SearchBehavior};

#[no_mangle]
@@ -32,8 +32,8 @@ pub extern "system" fn Java_SchemSearch_search<'local>(mut env: JNIEnv<'local>,
                                                       pattern_path: JString<'local>) -> jstring {
    let schematic_path: String = env.get_string(&schematic_path).expect("Couldn't get java string!").into();
    let pattern_path: String = env.get_string(&pattern_path).expect("Couldn't get java string!").into();
    let schematic = Schematic::load(&PathBuf::from(&schematic_path)).unwrap();
    let pattern = Schematic::load(&PathBuf::from(&pattern_path)).unwrap();
    let schematic = SpongeV2Schematic::load(&PathBuf::from(&schematic_path)).unwrap();
    let pattern = SpongeV2Schematic::load(&PathBuf::from(&pattern_path)).unwrap();

    let matches = search(schematic, &pattern, SearchBehavior {
        ignore_block_data: true,
```
schemsearch-lib/Cargo.toml (Normal file → Executable file, 10 changed lines)

```diff
@@ -1,12 +1,14 @@
[package]
name = "schemsearch-lib"
version = "0.1.1"
version = "0.1.7"
edition = "2021"
license = "AGPL-3.0-or-later"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
hematite-nbt = "0.5.2"
serde = "1.0.152"
schemsearch-files = { path = "../schemsearch-files" }
serde = { version = "1.0.160", features = ["derive"] }
schemsearch-files = { path = "../schemsearch-files" }
named-binary-tag = "0.6"
libmath = "0.2.1"
lazy_static = "1.4.0"
```
schemsearch-lib/src/blocks.txt (new Executable file, 163 lines)

```
oak_sign
oak_wall_sign
oak_hanging_sign
oak_wall_hanging_sign
birch_sign
birch_wall_sign
birch_hanging_sign
birch_wall_hanging_sign
spruce_sign
spruce_wall_sign
spruce_hanging_sign
spruce_wall_hanging_sign
jungle_sign
jungle_wall_sign
jungle_hanging_sign
jungle_wall_hanging_sign
dark_oak_sign
dark_oak_wall_sign
dark_oak_hanging_sign
dark_oak_wall_hanging_sign
acacia_sign
acacia_wall_sign
acacia_hanging_sign
acacia_wall_hanging_sign
mangrove_sign
mangrove_wall_sign
mangrove_hanging_sign
mangrove_wall_hanging_sign
cherry_sign
cherry_wall_sign
cherry_hanging_sign
cherry_wall_hanging_sign
bamboo_sign
bamboo_wall_sign
bamboo_hanging_sign
bamboo_wall_hanging_sign
warped_sign
warped_wall_sign
warped_hanging_sign
warped_wall_hanging_sign
crimson_sign
crimson_wall_sign
crimson_hanging_sign
crimson_wall_hanging_sign
suspicious_gravel
suspicious_sand
white_banner
light_gray_banner
gray_banner
black_banner
brown_banner
red_banner
orange_banner
yellow_banner
lime_banner
green_banner
cyan_banner
light_blue_banner
blue_banner
purple_banner
magenta_banner
pink_banner
white_wall_banner
light_gray_wall_banner
gray_wall_banner
black_wall_banner
brown_wall_banner
red_wall_banner
orange_wall_banner
yellow_wall_banner
lime_wall_banner
green_wall_banner
cyan_wall_banner
light_blue_wall_banner
blue_wall_banner
purple_wall_banner
magenta_wall_banner
pink_wall_banner
white_bed
light_gray_bed
gray_bed
black_bed
brown_bed
red_bed
orange_bed
yellow_bed
lime_bed
green_bed
cyan_bed
light_blue_bed
blue_bed
purple_bed
magenta_bed
pink_bed
shulker_box
white_shulker_box
light_gray_shulker_box
gray_shulker_box
black_shulker_box
brown_shulker_box
red_shulker_box
orange_shulker_box
yellow_shulker_box
lime_shulker_box
green_shulker_box
cyan_shulker_box
light_blue_shulker_box
blue_shulker_box
purple_shulker_box
magenta_shulker_box
pink_shulker_box
furnace
blast_furnace
smoker
chest
trapped_chest
ender_chest
enchanting_table
barrel
lectern
jukebox
bell
brewing_stand
bee_nest
beehive
decorated_pot
beacon
conduit
campfire
soul_campfire
redstone_comparator
hopper
dispenser
dropper
moving_piston
daylight_detector
sculk_sensor
calibrated_sculk_sensor
sculk_catalyst
sculk_shrieker
player_head
player_wall_head
wither_skeleton_skull
wither_skeleton_wall_skull
zombie_head
zombie_wall_head
skeleton_skull
skeleton_wall_skull
creeper_head
creeper_wall_head
piglin_head
piglin_wall_head
dragon_head
dragon_wall_head
chiseled_bookshelf
command_block
chain_command_block
repeating_command_block
structure_block
jigsaw_block
end_portal
end_gateway
monster_spawner
```
schemsearch-lib/src/lib.rs (Normal file → Executable file, 148 changed lines)

```diff
@@ -16,11 +16,10 @@
 */

pub mod pattern_mapper;
pub mod search;
pub mod nbt_search;

use serde::{Deserialize, Serialize};
use pattern_mapper::match_palette;
use schemsearch_files::Schematic;
use crate::pattern_mapper::match_palette_adapt;
use serde::{Serialize, Deserialize};

#[derive(Debug, Clone, Copy, Deserialize, Serialize)]
pub struct SearchBehavior {
@@ -30,81 +29,10 @@ pub struct SearchBehavior {
    pub air_as_any: bool,
    pub ignore_entities: bool,
    pub threshold: f32,
    pub invalid_nbt: bool,
}

pub fn search(
    schem: Schematic,
    pattern_schem: &Schematic,
    search_behavior: SearchBehavior,
) -> Vec<Match> {
    if schem.width < pattern_schem.width || schem.height < pattern_schem.height || schem.length < pattern_schem.length {
        return vec![];
    }

    if pattern_schem.palette.len() > schem.palette.len() {
        return vec![];
    }

    let pattern_schem = match_palette(&schem, &pattern_schem, search_behavior.ignore_block_data);

    let mut matches: Vec<Match> = Vec::new();

    let pattern_data = pattern_schem.block_data.as_slice();

    let schem_data = if search_behavior.ignore_block_data {
        match_palette_adapt(&schem, &pattern_schem.palette, search_behavior.ignore_block_data)
    } else {
        schem.block_data
    };

    let schem_data = schem_data.as_slice();

    let air_id = if search_behavior.ignore_air || search_behavior.air_as_any { pattern_schem.palette.get("minecraft:air").unwrap_or(&-1) } else { &-1};

    let pattern_blocks = pattern_data.len() as f32;

    let pattern_width = pattern_schem.width as usize;
    let pattern_height = pattern_schem.height as usize;
    let pattern_length = pattern_schem.length as usize;

    let schem_width = schem.width as usize;
    let schem_height = schem.height as usize;
    let schem_length = schem.length as usize;

    for y in 0..=schem_height - pattern_height {
        for z in 0..=schem_length - pattern_length {
            for x in 0..=schem_width - pattern_width {
                let mut matching = 0;
                for j in 0..pattern_height {
                    for k in 0..pattern_length {
                        for i in 0..pattern_width {
                            let index = (x + i) + schem_width * ((z + k) + (y + j) * schem_length);
                            let pattern_index = i + pattern_width * (k + j * pattern_length);
                            let data = unsafe {schem_data.get_unchecked(index) };
                            let pattern_data = unsafe { pattern_data.get_unchecked(pattern_index) };
                            if *data == *pattern_data || (search_behavior.ignore_air && *data == *air_id) || (search_behavior.air_as_any && *pattern_data == *air_id) {
                                matching += 1;
                            }
                        }
                    }
                }
                let matching_percent = matching as f32 / pattern_blocks;
                if matching_percent >= search_behavior.threshold {
                    matches.push(Match {
                        x: x as u16,
                        y: y as u16,
                        z: z as u16,
                        percent: matching_percent,
                    });
                }
            }
        }
    }

    return matches;
}

#[derive(Debug, Clone, Copy, Deserialize, Serialize)]
#[derive(Debug, Clone, Copy, Default, Deserialize, Serialize)]
pub struct Match {
    pub x: u16,
    pub y: u16,
@@ -112,17 +40,6 @@ pub struct Match {
    pub percent: f32,
}

impl Default for Match {
    fn default() -> Self {
        Self {
            x: 0,
            y: 0,
            z: 0,
            percent: 0.0,
        }
    }
}

#[inline]
pub fn normalize_data(data: &str, ignore_data: bool) -> &str {
    if ignore_data {
@@ -132,42 +49,34 @@ pub fn normalize_data(data: &str, ignore_data: bool) -> &str {
    }
}

pub fn parse_schematic(data: &Vec<u8>) -> Schematic {
    if data[0] == 0x1f && data[1] == 0x8b {
        // gzip
        nbt::from_gzip_reader(data.as_slice()).unwrap()
    } else {
        // uncompressed
        nbt::from_reader(data.as_slice()).unwrap()
    }
}

#[allow(unused_imports)]
#[cfg(test)]
mod tests {
    use std::path::{Path, PathBuf};
    use schemsearch_files::Schematic;
    use crate::pattern_mapper::strip_data;
    use schemsearch_files::SpongeSchematic;
    use crate::pattern_mapper::{match_palette, strip_data};
    use crate::search::search;
    use super::*;

    #[test]
    fn read_schematic() {
        let schematic = Schematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
        let schematic = SpongeSchematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();

        assert_eq!(schematic.width as usize * schematic.height as usize * schematic.length as usize, schematic.block_data.len());
        assert_eq!(schematic.palette_max, schematic.palette.len() as i32);
    }

    #[test]
    fn test_parse_function() {
        let file = std::fs::File::open("../tests/simple.schem").expect("Failed to open file");
        let schematic: Schematic = parse_schematic(&std::io::Read::bytes(file).map(|b| b.unwrap()).collect());
        let schematic = SpongeSchematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();

        assert_eq!(schematic.width as usize * schematic.height as usize * schematic.length as usize, schematic.block_data.len());
        assert_eq!(schematic.palette_max, schematic.palette.len() as i32);
    }

    #[test]
    fn test_strip_schem() {
        let schematic = Schematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
        let schematic = SpongeSchematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
        let stripped = strip_data(&schematic);

        assert_eq!(stripped.palette.keys().any(|k| k.contains('[')), false);
@@ -175,24 +84,24 @@ mod tests {

    #[test]
    fn test_match_palette() {
        let schematic = Schematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
        let endstone = Schematic::load(&PathBuf::from("../tests/endstone.schem")).unwrap();
        let schematic = SpongeSchematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
        let endstone = SpongeSchematic::load(&PathBuf::from("../tests/endstone.schem")).unwrap();

        let _ = match_palette(&schematic, &endstone, true);
    }

    #[test]
    fn test_match_palette_ignore_data() {
        let schematic = Schematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
        let endstone = Schematic::load(&PathBuf::from("../tests/endstone.schem")).unwrap();
        let schematic = SpongeSchematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
        let endstone = SpongeSchematic::load(&PathBuf::from("../tests/endstone.schem")).unwrap();

        let _ = match_palette(&schematic, &endstone, false);
    }

    #[test]
    pub fn test_big_search() {
        let schematic = Schematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
        let endstone = Schematic::load(&PathBuf::from("../tests/endstone.schem")).unwrap();
        let schematic = SpongeSchematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
        let endstone = SpongeSchematic::load(&PathBuf::from("../tests/endstone.schem")).unwrap();

        let _ = search(schematic, &endstone, SearchBehavior {
            ignore_block_data: true,
@@ -200,14 +109,15 @@ mod tests {
            ignore_entities: true,
            ignore_air: false,
            air_as_any: false,
            threshold: 0.9
            threshold: 0.9,
            invalid_nbt: false
        });
    }

    #[test]
    pub fn test_search() {
        let schematic = Schematic::load(&PathBuf::from("../tests/Random.schem")).unwrap();
        let pattern = Schematic::load(&PathBuf::from("../tests/Pattern.schem")).unwrap();
        let schematic = SpongeSchematic::load(&PathBuf::from("../tests/Random.schem")).unwrap();
        let pattern = SpongeSchematic::load(&PathBuf::from("../tests/Pattern.schem")).unwrap();

        let matches = search(schematic, &pattern, SearchBehavior {
            ignore_block_data: true,
@@ -215,10 +125,10 @@ mod tests {
            ignore_entities: true,
            ignore_air: false,
            air_as_any: false,
            threshold: 0.9
            threshold: 0.9,
            invalid_nbt: false
        });

        println!("{:?}", matches);
        assert_eq!(matches.len(), 1);
        assert_eq!(matches[0].x, 1);
        assert_eq!(matches[0].y, 0);
@@ -228,8 +138,8 @@

    #[test]
    pub fn test_search_ws() {
        let schematic = Schematic::load(&PathBuf::from("../tests/warships/GreyFly-by-Bosslar.schem")).unwrap();
        let pattern = Schematic::load(&PathBuf::from("../tests/gray_castle_complex.schem")).unwrap();
        let schematic = SpongeSchematic::load(&PathBuf::from("../tests/warships/GreyFly-by-Bosslar.schem")).unwrap();
        let pattern = SpongeSchematic::load(&PathBuf::from("../tests/gray_castle_complex.schem")).unwrap();

        let matches = search(schematic, &pattern, SearchBehavior {
            ignore_block_data: false,
@@ -237,10 +147,10 @@ mod tests {
            ignore_entities: false,
            ignore_air: false,
            air_as_any: false,
            threshold: 0.9
            threshold: 0.9,
            invalid_nbt: false
        });

        println!("{:?}", matches);
        assert_eq!(matches.len(), 1);
    }
}
```
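Both the old in-crate `search` above and the new `search` module address blocks with `index = (x + i) + schem_width * ((z + k) + (y + j) * schem_length)`, i.e. x varies fastest, then z, then y. A small self-contained check of that layout (the helper name is ours, not the crate's):

```rust
// index = x + width * (z + y * length): x fastest, then z, then y.
fn block_index(x: usize, y: usize, z: usize, width: usize, length: usize) -> usize {
    x + width * (z + y * length)
}

fn main() {
    // A 3 (width) x 2 (height) x 4 (length) schematic stores 24 entries;
    // the block at (x = 2, y = 1, z = 3) is the last one: 2 + 3 * (3 + 1 * 4) = 23.
    assert_eq!(block_index(2, 1, 3, 3, 4), 23);
}
```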
schemsearch-lib/src/nbt_search.rs (new Executable file, 110 lines)

```rust
use std::borrow::ToOwned;
use std::collections::HashSet;
use std::iter::Iterator;
use lazy_static::lazy_static;
use schemsearch_files::SpongeSchematic;

const NBT_BLOCKS: &str = include_str!("blocks.txt");

lazy_static! {
    static ref NBT_BLOCKS_SET: HashSet<String> = {
        NBT_BLOCKS.lines().map(|x| format!("minecraft:{}", x)).collect()
    };
}

pub fn has_invalid_nbt(schem: SpongeSchematic) -> bool {
    if schem.block_entities.is_empty() && schem.palette.keys().any(|v| NBT_BLOCKS_SET.contains(v)) {
        return true;
    }

    let nbt_blocks = schem.palette.iter().filter(|(k, _)| NBT_BLOCKS_SET.contains(k.to_owned())).map(|(_, v)| *v).collect::<HashSet<i32>>();

    for (i, block_entity) in schem.block_data.iter().enumerate() {
        if nbt_blocks.contains(&*block_entity) {
            // i = x + z * Width + y * Width * Length
            let x = i % schem.width as usize;
            let z = (i / schem.width as usize) % schem.length as usize;
            let y = i / (schem.width as usize * schem.length as usize);
            if schem.block_entities.iter().any(|e| !e.pos.eq(&[x as i32, y as i32, z as i32])) {
                return true;
            }
        }
    }

    return false;
}

#[allow(unused_imports)]
#[cfg(test)]
mod tests {
    use nbt::CompoundTag;
    use schemsearch_files::{BlockEntity, SpongeSchematic};
    use super::*;

    #[test]
    fn test_has_invalid_nbt() {
        let schem = SpongeSchematic {
            data_version: 1,
            metadata: CompoundTag::new(),
            width: 0,
            height: 0,
            length: 0,
            offset: [0, 0, 0],
            palette_max: 1,
            palette: vec![("minecraft:chest".to_owned(), 1)].into_iter().collect(),
            block_data: vec![1],
            block_entities: vec![],
            entities: None,
        };

        assert_eq!(has_invalid_nbt(schem), true);
    }

    #[test]
    fn test_has_invalid_nbt_2() {
        let schem = SpongeSchematic {
            data_version: 1,
            metadata: CompoundTag::new(),
            width: 1,
            height: 1,
            length: 1,
            offset: [0, 0, 0],
            palette_max: 1,
            palette: vec![("minecraft:chest".to_owned(), 1)].into_iter().collect(),
            block_data: vec![1],
            block_entities: vec![
                BlockEntity {
                    id: "minecraft:chest".to_owned(),
                    pos: [0, 0, 0],
                }
            ],
            entities: None,
        };

        assert_eq!(has_invalid_nbt(schem), false);
    }

    #[test]
    fn test_has_invalid_nbt_3() {
        let schem = SpongeSchematic {
            data_version: 1,
            metadata: CompoundTag::new(),
            width: 2,
            height: 1,
            length: 1,
            offset: [0, 0, 0],
            palette_max: 1,
            palette: vec![("minecraft:chest".to_owned(), 1), ("minecraft:stone".to_owned(), 2)].into_iter().collect(),
            block_data: vec![1, 2],
            block_entities: vec![
                BlockEntity {
                    id: "minecraft:chest".to_owned(),
                    pos: [1, 0, 0],
                }
            ],
            entities: None,
        };

        assert_eq!(has_invalid_nbt(schem), true);
    }
}
```
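`has_invalid_nbt` inverts the flat layout to recover coordinates from an index: `x = i % width`, `z = (i / width) % length`, `y = i / (width * length)`. A round-trip sketch (helper names are illustrative, not part of the crate):

```rust
fn compose(x: usize, y: usize, z: usize, width: usize, length: usize) -> usize {
    x + width * (z + y * length)
}

fn decompose(i: usize, width: usize, length: usize) -> (usize, usize, usize) {
    (i % width, i / (width * length), (i / width) % length) // (x, y, z)
}

fn main() {
    let (width, length) = (3, 4);
    let i = compose(2, 1, 3, width, length); // 23
    assert_eq!(decompose(i, width, length), (2, 1, 3));
}
```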
```diff
@@ -15,11 +15,12 @@
 * along with this program. If not, see <https://www.gnu.org/licenses/>.
 */

use nbt::Map;
use schemsearch_files::Schematic;
use std::collections::HashMap;
use nbt::CompoundTag;
use schemsearch_files::SpongeSchematic;
use crate::normalize_data;

fn create_reverse_palette(schem: &Schematic) -> Vec<&str> {
fn create_reverse_palette(schem: &SpongeSchematic) -> Vec<&str> {
    let mut reverse_palette = Vec::with_capacity(schem.palette_max as usize);
    (0..schem.palette_max).for_each(|_| reverse_palette.push(""));
    for (key, value) in schem.palette.iter() {
@@ -28,15 +29,15 @@ fn create_reverse_palette(schem: &Schematic) -> Vec<&str> {
    reverse_palette
}

pub fn strip_data(schem: &Schematic) -> Schematic {
pub fn strip_data(schem: &SpongeSchematic) -> SpongeSchematic {
    let mut data: Vec<i32> = Vec::new();

    let mut palette: Map<String, i32> = Map::new();
    let mut palette: HashMap<String, i32> = HashMap::new();
    let mut palette_max: i32 = 0;
    let reverse_palette = create_reverse_palette(schem);

    for block in schem.block_data.iter() {
        let block_name = reverse_palette[*block as usize].clone();
        let block_name = reverse_palette[*block as usize];
        let block_name = block_name.split('[').next().unwrap().to_string();

        let entry = palette.entry(block_name).or_insert_with(|| {
@@ -47,9 +48,8 @@ pub fn strip_data(schem: &Schematic) -> Schematic {
        data.push(*entry);
    }

    Schematic {
        version: schem.version,
        data_version: schem.data_version,
    SpongeSchematic {
        data_version: 1,
        palette,
        palette_max,
        block_data: data,
@@ -57,17 +57,17 @@ pub fn strip_data(schem: &Schematic) -> Schematic {
        height: schem.height,
        length: schem.length,
        width: schem.width,
        metadata: schem.metadata.clone(),
        offset: schem.offset.clone(),
        metadata: CompoundTag::new(),
        offset: [0; 3],
        entities: None,
    }
}

pub fn match_palette_adapt(schem: &Schematic, matching_palette: &Map<String, i32>, ignore_data: bool) -> Vec<i32> {
    let mut data: Vec<i32> = Vec::new();
pub fn match_palette_adapt(schem: &SpongeSchematic, matching_palette: &HashMap<String, i32>, ignore_data: bool) -> Vec<i32> {
    let mut data = Vec::with_capacity(schem.block_data.len());
    let reverse_palette = create_reverse_palette(schem);

    for x in &schem.block_data {
    for x in schem.block_data.as_slice().iter() {
        let blockname = reverse_palette[*x as usize];
        let blockname = if ignore_data { normalize_data(blockname, ignore_data) } else { blockname };
        let block_id = match matching_palette.get(&*blockname) {
@@ -81,10 +81,10 @@ pub fn match_palette_adapt(schem: &Schematic, matching_palette: &Map<String, i32
}

pub fn match_palette(
    schem: &Schematic,
    pattern: &Schematic,
    schem: &SpongeSchematic,
    pattern: &SpongeSchematic,
    ignore_data: bool,
) -> Schematic {
) -> SpongeSchematic {
    if ignore_data {
        match_palette_internal(&strip_data(schem), &strip_data(pattern), ignore_data)
    } else {
@@ -93,24 +93,23 @@ pub fn match_palette(
}

fn match_palette_internal(
    schem: &Schematic,
    pattern: &Schematic,
    schem: &SpongeSchematic,
    pattern: &SpongeSchematic,
    ignore_data: bool,
) -> Schematic {
) -> SpongeSchematic {
    let data_pattern: Vec<i32> = match_palette_adapt(&pattern, &schem.palette, ignore_data);

    Schematic {
        version: pattern.version.clone(),
        data_version: pattern.data_version.clone(),
    SpongeSchematic {
        data_version: 0,
        palette: schem.palette.clone(),
        palette_max: schem.palette_max,
        block_data: data_pattern,
        block_entities: pattern.block_entities.clone(),
        height: pattern.height.clone(),
        length: pattern.length.clone(),
        width: pattern.width.clone(),
        metadata: pattern.metadata.clone(),
        offset: pattern.offset.clone(),
        height: pattern.height,
        length: pattern.length,
        width: pattern.width,
        metadata: CompoundTag::new(),
        offset: [0; 3],
        entities: None,
    }
}
```
schemsearch-lib/src/search.rs (new Executable file, 87 lines)

```rust
use math::round::ceil;
use schemsearch_files::SpongeSchematic;
use crate::{Match, SearchBehavior};
use crate::pattern_mapper::{match_palette, match_palette_adapt};

pub fn search(
    schem: SpongeSchematic,
    pattern_schem: &SpongeSchematic,
    search_behavior: SearchBehavior,
) -> Vec<Match> {
    if schem.width < pattern_schem.width || schem.height < pattern_schem.height || schem.length < pattern_schem.length {
        return Vec::new();
    }

    if pattern_schem.palette.len() > schem.palette.len() {
        return Vec::new();
    }

    let pattern_schem = match_palette(&schem, &pattern_schem, search_behavior.ignore_block_data);

    let mut matches: Vec<Match> = Vec::with_capacity(4);

    let pattern_data = pattern_schem.block_data.as_ptr();

    let schem_data = if search_behavior.ignore_block_data {
        match_palette_adapt(&schem, &pattern_schem.palette, search_behavior.ignore_block_data)
    } else {
        schem.block_data
    };

    let schem_data = schem_data.as_ptr();

    let air_id = if search_behavior.ignore_air || search_behavior.air_as_any { pattern_schem.palette.get("minecraft:air").unwrap_or(&-1) } else { &-1};

    let pattern_blocks = pattern_schem.block_data.len() as f32;
    let i_pattern_blocks = pattern_blocks as i32;

    let pattern_width = pattern_schem.width as usize;
    let pattern_height = pattern_schem.height as usize;
    let pattern_length = pattern_schem.length as usize;

    let schem_width = schem.width as usize;
    let schem_height = schem.height as usize;
    let schem_length = schem.length as usize;

    let skip_amount = ceil((pattern_blocks * (1.0 - search_behavior.threshold)) as f64, 0) as i32;

    for y in 0..=schem_height - pattern_height {
        for z in 0..=schem_length - pattern_length {
            for x in 0..=schem_width - pattern_width {
                let mut not_matching = 0;
                'outer:
                for j in 0..pattern_height {
                    for k in 0..pattern_length {
                        'inner:
                        for i in 0..pattern_width {
                            let index = (x + i) + schem_width * ((z + k) + (y + j) * schem_length);
                            let pattern_index = i + pattern_width * (k + j * pattern_length);
                            let data = unsafe { *schem_data.add(index) };
                            let pattern_data = unsafe { *pattern_data.add(pattern_index) };
                            if (search_behavior.ignore_air && data != *air_id) || (search_behavior.air_as_any && pattern_data != *air_id) {
                                continue 'inner;
                            }
                            if data != pattern_data {
                                not_matching += 1;
                                if not_matching >= skip_amount {
                                    break 'outer;
                                }
                            }
                        }
                    }
                }

                if not_matching < skip_amount {
                    matches.push(Match {
                        x: x as u16,
                        y: y as u16,
                        z: z as u16,
                        percent: (i_pattern_blocks - not_matching) as f32 / pattern_blocks,
                    });
                }
            }
        }
    }

    return matches;
}
```
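The early exit above is driven by `skip_amount = ceil(pattern_blocks * (1.0 - threshold))`: a window is abandoned as soon as `not_matching` reaches that bound, and otherwise reported with `percent = (pattern_blocks - not_matching) / pattern_blocks`. A small numeric sketch of the bound, using `f32::ceil` here instead of the `libmath` call in the file:

```rust
// For a 100-block pattern at threshold 0.75, up to 24 mismatches still yield a
// match; the 25th mismatch triggers the `break 'outer` path.
fn skip_amount(pattern_blocks: f32, threshold: f32) -> i32 {
    (pattern_blocks * (1.0 - threshold)).ceil() as i32
}

fn main() {
    assert_eq!(skip_amount(100.0, 0.75), 25);
}
```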
```diff
@@ -1,13 +1,13 @@
[package]
name = "schemsearch-sql"
version = "0.1.1"
version = "0.1.3"
edition = "2021"
license = "AGPL-3.0-or-later"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
sqlx = { version = "0.6", features = [ "runtime-async-std-native-tls" , "mysql" ] }
sqlx = { version = "0.7", features = [ "runtime-async-std-native-tls" , "mysql" ] }

schemsearch-lib = { path = "../schemsearch-lib" }
schemsearch-files = { path = "../schemsearch-files" }
```