33 Commits

SHA1 Message Date
2a584e878f Fixing... 2024-04-27 22:30:29 +02:00
33f5fe03fe Merge pull request #11 from Chaoscaot/add-invalid-nbt-arg
🔧 Add invalid_nbt flag.
2024-04-27 21:55:13 +02:00
0e6f2c3f78 🔧 Add invalid_nbt flag. 2024-04-27 21:27:42 +02:00
82108d9e36 🛠️ Fix incorrect CSV format in OutputFormat::CSV. (#10) 2024-04-27 20:19:10 +02:00
d20940f89b Improve Performance 2023-08-20 15:37:23 +02:00
e3e6e9f759 Improve Performance 2023-08-09 09:22:24 +02:00
ccae2ba393 Merge pull request #9 from Chaoscaot/dependabot/cargo/sqlx-0.7
Update sqlx requirement from 0.6 to 0.7
2023-07-11 20:48:13 +02:00
6c6c95bedd Update sqlx requirement from 0.6 to 0.7
Updates the requirements on [sqlx](https://github.com/launchbadge/sqlx) to permit the latest version.
- [Changelog](https://github.com/launchbadge/sqlx/blob/main/CHANGELOG.md)
- [Commits](https://github.com/launchbadge/sqlx/compare/v0.6.0...v0.7.0)

---
updated-dependencies:
- dependency-name: sqlx
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-07-10 10:49:15 +00:00
582079c80d Bump Version 2023-05-23 20:10:41 +02:00
e25aeab065 Fix Broken Schematics Loading 2023-05-23 20:07:23 +02:00
aee3a80267 Reduce FLOPs 2023-05-01 11:32:40 +02:00
5107e04497 Update README.md 2023-04-28 00:28:34 +02:00
a357da2ce8 Fix Tests 2023-04-24 22:52:03 +02:00
eb84adb4a3 Reduce Boilerplate 2023-04-24 19:04:44 +02:00
25c4e97f71 Merge branch 'chaoscaot/support_sponge_v1_v3' 2023-04-23 12:19:22 +02:00
7d9e7f28a9 Fix SQL 2023-04-23 12:17:59 +02:00
ef1f8ed38f Merge pull request #8 from Chaoscaot/chaoscaot/support_sponge_v1_v3
Add Support for Sponge Schematic V1 and V3
2023-04-22 21:55:51 +02:00
4671f38591 Bump Version 2023-04-22 21:55:08 +02:00
5cff84538d Fix Performance 2023-04-22 21:29:18 +02:00
9a0b0535c6 remove Serde 2023-04-22 21:03:00 +02:00
a47c2f44bd Something™️ 2023-04-22 16:39:21 +02:00
246927d840 idk what im doing 2023-04-14 17:56:28 +02:00
d1a01dc0c1 Faster but not working 2023-04-13 23:49:39 +02:00
e03a805bdb Something Working :D 2023-04-13 23:16:12 +02:00
9cca860db3 Some new Ideas 2023-04-13 16:16:02 +02:00
80f5191ae8 Merge branch 'master' into chaoscaot/support_sponge_v1_v3 2023-04-13 14:33:59 +02:00
3f20cbc17f Create CODE_OF_CONDUCT.md 2023-04-13 00:25:42 +02:00
733aaa9e72 Update dependabot.yml 2023-04-13 00:21:24 +02:00
14866df17d Create dependabot.yml 2023-04-13 00:20:53 +02:00
00e3d6fd0f Fix Cache 2023-04-05 13:07:14 +02:00
fb8f935617 Fix Cache and Bump Version 2023-04-05 13:05:15 +02:00
2a112ac49c Add Output Limit 2023-04-05 02:43:28 +02:00
a1b5449f06 Some basic tests and basic impls 2023-04-04 12:07:33 +02:00
23 changed files with 840 additions and 275 deletions

6
.github/dependabot.yml vendored Normal file

@@ -0,0 +1,6 @@
version: 2
updates:
  - package-ecosystem: "cargo"
    directory: "/"
    schedule:
      interval: "weekly"

View File

@@ -19,32 +19,36 @@ jobs:
       - name: Cache Cargo modules
         id: cache-cargo
         uses: actions/cache@v3
-        env:
-          cache-name: cache-cargo-target-debug
+        continue-on-error: false
         with:
-          path: target
-          key: ${{ runner.os }}-${{ env.cache-name }}-${{ hashFiles('**/Cargo.lock') }}
+          path: |
+            ~/.cargo/registry
+            ~/.cargo/git
+          key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
           restore-keys: |
-            ${{ runner.os }}-${{ env.cache-name }}-
+            ${{ runner.os }}-cargo-
       - name: Build
         run: make debug
       - name: Run tests
         run: cargo test --verbose -p schemsearch-lib
   build-release:
+    needs:
+      - build
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v3
       - name: Cache Cargo modules
         id: cache-cargo
         uses: actions/cache@v3
-        env:
-          cache-name: cache-cargo-target-release
+        continue-on-error: false
         with:
-          path: target
-          key: ${{ runner.os }}-${{ env.cache-name }}-${{ hashFiles('**/Cargo.lock') }}
+          path: |
+            ~/.cargo/registry
+            ~/.cargo/git
+          key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
           restore-keys: |
-            ${{ runner.os }}-${{ env.cache-name }}-
+            ${{ runner.os }}-cargo-
       - name: Build
         run: make
       - name: Upload a Build Artifact

View File

@@ -22,13 +22,14 @@ jobs:
       - name: Cache Cargo modules
         id: cache-cargo
         uses: actions/cache@v3
-        env:
-          cache-name: cache-cargo-target-release
+        continue-on-error: false
        with:
-          path: target
-          key: ${{ runner.os }}-${{ env.cache-name }}-${{ hashFiles('**/Cargo.lock') }}
+          path: |
+            ~/.cargo/registry
+            ~/.cargo/git
+          key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
           restore-keys: |
-            ${{ runner.os }}-${{ env.cache-name }}-
+            ${{ runner.os }}-cargo-
       - name: Build
         run: make
       - name: Create Tarball

128
CODE_OF_CONDUCT.md Normal file

@@ -0,0 +1,128 @@
# Contributor Covenant Code of Conduct
## Our Pledge
We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.
We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.
## Our Standards
Examples of behavior that contributes to a positive environment for our
community include:
* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
* Focusing on what is best not just for us as individuals, but for the
overall community
Examples of unacceptable behavior include:
* The use of sexualized language or imagery, and sexual attention or
advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Enforcement Responsibilities
Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.
Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.
## Scope
This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
chaoscaot@zohomail.eu.
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the
reporter of any incident.
## Enforcement Guidelines
Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:
### 1. Correction
**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.
**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.
### 2. Warning
**Community Impact**: A violation through a single incident or series
of actions.
**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or
permanent ban.
### 3. Temporary Ban
**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.
**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.
### 4. Permanent Ban
**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within
the community.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.0, available at
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
Community Impact Guidelines were inspired by [Mozilla's code of conduct
enforcement ladder](https://github.com/mozilla/diversity).
[homepage]: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see the FAQ at
https://www.contributor-covenant.org/faq. Translations are available at
https://www.contributor-covenant.org/translations.

1
Cargo.toml Normal file → Executable file

@@ -7,6 +7,7 @@ members = [
     "schemsearch-sql",
     "schemsearch-java"
 ]
+resolver = "2"
 
 [profile.small]
 inherits = "release"

View File

@@ -1,5 +1,5 @@
 # schemsearch
-### A *simple* CLI tool to search in Sponge V2 Schematic files
+### A *simple* CLI tool to search in Sponge Schematic files
 ---
@@ -43,7 +43,6 @@ schemsearch-cli --help
 ## Roadmap
 A list of features that are planned to be implemented in the future. In order of priority.
-- [ ] Full JSON output (Progressbar)
 - [ ] Use AVX2 for faster search
 - [ ] Tile entities data search
 - [ ] Entities search

4
schemsearch-cli/Cargo.toml Normal file → Executable file

@@ -1,6 +1,6 @@
 [package]
 name = "schemsearch-cli"
-version = "0.1.1"
+version = "0.1.7"
 edition = "2021"
 license = "AGPL-3.0-or-later"
@@ -13,7 +13,7 @@ schemsearch-sql = { path = "../schemsearch-sql", optional = true }
 clap = { version = "4.1.8", features = ["cargo"] }
 futures = { version = "0.3", optional = true }
-sqlx = { version = "0.6", features = [ "runtime-async-std-native-tls" , "mysql" ], optional = true }
+sqlx = { version = "0.7", features = [ "runtime-async-std-native-tls" , "mysql" ], optional = true }
 rayon = "1.7.0"
 indicatif = { version = "0.17.3", features = ["rayon"] }
 serde = "1.0.157"

100
schemsearch-cli/src/main.rs Normal file → Executable file

@@ -26,8 +26,8 @@ use clap::{command, Arg, ArgAction, ValueHint};
 use std::path::PathBuf;
 use std::str::FromStr;
 use clap::error::ErrorKind;
-use schemsearch_lib::{Match, search, SearchBehavior};
-use crate::types::{PathSchematicSupplier, SchematicSupplierType};
+use schemsearch_lib::{Match, SearchBehavior};
+use crate::types::{PathSchematicSupplier, SchematicSupplier, SchematicSupplierType};
 #[cfg(feature = "sql")]
 use futures::executor::block_on;
 use rayon::prelude::*;
@@ -39,9 +39,11 @@ use schemsearch_sql::load_all_schematics;
 #[cfg(feature = "sql")]
 use crate::types::SqlSchematicSupplier;
 use indicatif::*;
-use schemsearch_files::Schematic;
+use schemsearch_files::SpongeSchematic;
 use crate::sinks::{OutputFormat, OutputSink};
 use crate::stderr::MaschineStdErr;
+use schemsearch_lib::nbt_search::has_invalid_nbt;
+use schemsearch_lib::search::search;
 
 fn main() {
     #[allow(unused_mut)]
@@ -49,8 +51,8 @@ fn main() {
         .arg(
             Arg::new("pattern")
                 .help("The pattern to search for")
-                .required(true)
                 .value_hint(ValueHint::FilePath)
+                .required_unless_present("invalid-nbt")
                 .action(ArgAction::Set),
         )
         .arg(
@@ -94,6 +96,13 @@
                 .long("air-as-any")
                 .action(ArgAction::SetTrue),
         )
+        .arg(
+            Arg::new("invalid-nbt")
+                .help("Search for Schematics with Invalid or missing NBT data")
+                .short('I')
+                .long("invalid-nbt")
+                .action(ArgAction::SetTrue),
+        )
         .arg(
             Arg::new("output")
                 .help("The output format and path [Format:Path] available formats: text, json, csv; available paths: std, err, (file path)")
@@ -134,7 +143,7 @@
         )
         .arg(
             Arg::new("threads")
-                .help("The number of threads to use [0 = Available Threads]")
+                .help("The number of threads to use [0 = all Available Threads]")
                 .short('T')
                 .long("threads")
                 .action(ArgAction::Set)
@@ -150,6 +159,15 @@
                 .default_value("0")
                 .value_parser(|s: &str| s.parse::<u16>().map_err(|e| e.to_string()))
         )
+        .arg(
+            Arg::new("limit")
+                .help("The maximum number of matches to return [0 = Unlimited]")
+                .short('l')
+                .long("limit")
+                .action(ArgAction::Set)
+                .default_value("50")
+                .value_parser(|s: &str| s.parse::<usize>().map_err(|e| e.to_string())),
+        )
         .about("Searches for a pattern in a schematic")
         .bin_name("schemsearch");
@@ -195,18 +213,22 @@
         air_as_any: matches.get_flag("air-as-any"),
         ignore_entities: matches.get_flag("ignore-entities"),
         threshold: *matches.get_one::<f32>("threshold").expect("Couldn't get threshold"),
+        invalid_nbt: matches.get_flag("invalid-nbt"),
     };
 
-    let pattern = match Schematic::load(&PathBuf::from(matches.get_one::<String>("pattern").unwrap())) {
-        Ok(x) => x,
+    let pattern = match matches.get_one::<String>("pattern") {
+        Some(p) => match SpongeSchematic::load(&PathBuf::from(p)) {
+            Ok(x) => Some(x),
             Err(e) => {
                 cmd.error(ErrorKind::Io, format!("Error while loading Pattern: {}", e.to_string())).exit();
             }
+        },
+        None => None,
     };
 
     let mut schematics: Vec<SchematicSupplierType> = Vec::new();
     match matches.get_many::<String>("schematic") {
-        None => {},
+        None => {}
         Some(x) => {
             let paths = x.map(|x| PathBuf::from(x));
             for path in paths {
@@ -217,12 +239,12 @@
                     .filter(|x| x.path().is_file())
                     .filter(|x| x.path().extension().unwrap().to_str().unwrap() == "schem")
                     .for_each(|x| {
-                        schematics.push(SchematicSupplierType::PATH(Box::new(PathSchematicSupplier {
+                        schematics.push(SchematicSupplierType::PATH(PathSchematicSupplier {
                             path: x.path(),
-                        })))
+                        }))
                     });
                 } else if path.extension().unwrap().to_str().unwrap() == "schem" {
-                    schematics.push(SchematicSupplierType::PATH(Box::new(PathSchematicSupplier { path })));
+                    schematics.push(SchematicSupplierType::PATH(PathSchematicSupplier { path }));
                 }
             }
         }
@@ -264,6 +286,8 @@
         bar.set_draw_target(ProgressDrawTarget::term_like(Box::new(MaschineStdErr { size: term_size })))
     }
 
+    let max_matching = *matches.get_one::<usize>("limit").expect("Could not get max-matching");
+
     let matches: Vec<SearchResult> = schematics.par_iter().progress_with(bar).map(|schem| {
         match schem {
             SchematicSupplierType::PATH(schem) => {
@@ -271,28 +295,20 @@
                     Some(x) => x,
                     None => return SearchResult {
                         name: schem.get_name(),
-                        matches: Vec::default()
+                        matches: Vec::default(),
                     }
                 };
-                SearchResult {
-                    name: schem.get_name(),
-                    matches: search(schematic, &pattern, search_behavior)
-                }
+                search_in_schem(schematic, pattern.as_ref(), search_behavior, schem)
             }
             #[cfg(feature = "sql")]
             SchematicSupplierType::SQL(schem) => {
                 match schem.get_schematic() {
-                    Ok(schematic) => {
-                        SearchResult {
-                            name: schem.get_name(),
-                            matches: search(schematic, &pattern, search_behavior)
-                        }
-                    }
+                    Ok(schematic) => search_in_schem(schematic, pattern.as_ref(), search_behavior, schem),
                     Err(e) => {
                         eprintln!("Error while loading schematic ({}): {}", schem.get_name(), e.to_string());
                         SearchResult {
                             name: schem.get_name(),
-                            matches: Vec::default()
+                            matches: Vec::default(),
                         }
                     }
                 }
@@ -300,13 +316,19 @@
             }
     }).collect();
 
-    for matching in matches {
+    let mut matches_count = 0;
+    'outer: for matching in matches {
         let schem_name = matching.name;
         let matching = matching.matches;
         for x in matching {
             for out in &mut output {
                 write!(out.1, "{}", out.0.found_match(&schem_name, x)).unwrap();
             }
+            matches_count += 1;
+            if max_matching != 0 && matches_count >= max_matching {
+                break 'outer;
+            }
         }
     }
@@ -317,8 +339,34 @@
     }
 }
 
-fn load_schem(schem_path: &PathBuf) -> Option<Schematic> {
-    match Schematic::load(schem_path) {
+fn search_in_schem(schematic: SpongeSchematic, pattern: Option<&SpongeSchematic>, search_behavior: SearchBehavior, schem: &impl SchematicSupplier) -> SearchResult {
+    if search_behavior.invalid_nbt {
+        if has_invalid_nbt(schematic) {
+            SearchResult {
+                name: schem.get_name(),
+                matches: vec![Match {
+                    x: 0,
+                    y: 0,
+                    z: 0,
+                    percent: 1.0,
+                }],
+            }
+        } else {
+            SearchResult {
+                name: schem.get_name(),
+                matches: vec![],
+            }
+        }
+    } else {
+        SearchResult {
+            name: schem.get_name(),
+            matches: search(schematic, pattern.unwrap(), search_behavior),
+        }
+    }
+}
+
+fn load_schem(schem_path: &PathBuf) -> Option<SpongeSchematic> {
+    match SpongeSchematic::load(schem_path) {
         Ok(x) => Some(x),
         Err(e) => {
             println!("Error while loading schematic ({}): {}", schem_path.to_str().unwrap(), e.to_string());

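The new --limit flag is enforced after the parallel search with a labeled break across the nested result loops. A self-contained sketch of the same control flow, with plain integer vectors standing in for SearchResult values (the data below is illustrative, not taken from the crate):

fn main() {
    // Each inner Vec stands in for the matches found in one schematic.
    let per_schematic: Vec<Vec<u32>> = vec![vec![1, 2], vec![3, 4, 5], vec![6]];
    let max_matching = 4; // 0 would mean "unlimited", mirroring the CLI handling

    let mut matches_count = 0;
    'outer: for matches in &per_schematic {
        for m in matches {
            println!("match {m}");
            matches_count += 1;
            if max_matching != 0 && matches_count >= max_matching {
                break 'outer; // stop emitting output once the limit is reached
            }
        }
    }
    assert_eq!(matches_count, 4);
}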
2
schemsearch-cli/src/sinks.rs Normal file → Executable file

@@ -71,7 +71,7 @@ impl OutputFormat {
     pub fn start(&self, total: u32, search_behavior: &SearchBehavior, start_time: u128) -> String {
         match self {
             OutputFormat::Text => format!("Starting search in {} schematics\n", total),
-            OutputFormat::CSV => format!("Name,X,Y,Z,Percent\n"),
+            OutputFormat::CSV => "Name,X,Y,Z,Percent\n".to_owned(),
             OutputFormat::JSON => format!("{}\n", serde_json::to_string(&JsonEvent::Init(InitEvent {
                 total,
                 search_behavior: search_behavior.clone(),

29
schemsearch-cli/src/types.rs Normal file → Executable file

@@ -15,26 +15,32 @@
  * along with this program. If not, see <https://www.gnu.org/licenses/>.
  */
 
+#[cfg(feature = "sql")]
+use std::io::Cursor;
 use std::path::PathBuf;
 #[cfg(feature = "sql")]
 use futures::executor::block_on;
-#[allow(unused_imports)]
-use schemsearch_files::Schematic;
+#[cfg(feature = "sql")]
+use schemsearch_files::SpongeSchematic;
 #[cfg(feature = "sql")]
 use schemsearch_sql::{load_schemdata, SchematicNode};
 
 pub enum SchematicSupplierType {
-    PATH(Box<PathSchematicSupplier>),
+    PATH(PathSchematicSupplier),
     #[cfg(feature = "sql")]
     SQL(SqlSchematicSupplier),
 }
 
+pub trait SchematicSupplier {
+    fn get_name(&self) -> String;
+}
+
 pub struct PathSchematicSupplier {
     pub path: PathBuf,
 }
 
-impl PathSchematicSupplier {
-    pub fn get_name(&self) -> String {
+impl SchematicSupplier for PathSchematicSupplier {
+    fn get_name(&self) -> String {
         self.path.file_stem().unwrap().to_str().unwrap().to_string()
     }
 }
@@ -46,12 +52,17 @@ pub struct SqlSchematicSupplier {
 #[cfg(feature = "sql")]
 impl SqlSchematicSupplier {
-    pub fn get_schematic(&self) -> Result<Schematic, String> {
-        let schemdata = block_on(load_schemdata(self.node.id));
-        Schematic::load_data(schemdata.as_slice())
+    pub fn get_schematic(&self) -> Result<SpongeSchematic, String> {
+        let mut schemdata = block_on(load_schemdata(self.node.id));
+        SpongeSchematic::load_data(&mut Cursor::new(schemdata.as_mut_slice()))
     }
+}
 
-    pub fn get_name(&self) -> String {
+#[cfg(feature = "sql")]
+impl SchematicSupplier for SqlSchematicSupplier {
+    fn get_name(&self) -> String {
         format!("{} ({})", self.node.name, self.node.id)
     }
 }

View File

@@ -1,6 +1,6 @@
 [package]
 name = "schemsearch_faster"
-version = "0.1.1"
+version = "0.1.3"
 edition = "2021"
 license = "AGPL-3.0-or-later"

View File

@@ -16,9 +16,9 @@
  */
 
 use nbt::Map;
-use schemsearch_files::Schematic;
+use schemsearch_files::SpongeV2Schematic;
 
-pub fn convert_to_search_space(schem: &Schematic, palette: &Vec<String>) -> Vec<Vec<u8>> {
+pub fn convert_to_search_space(schem: &SpongeV2Schematic, palette: &Vec<String>) -> Vec<Vec<u8>> {
     let mut data: Vec<Vec<u8>> = Vec::with_capacity(palette.len());
     let block_data = &schem.block_data;
     for name in palette {
@@ -48,26 +48,26 @@ pub fn unwrap_palette(palette: &Map<String, i32>) -> Vec<String> {
 #[cfg(test)]
 mod tests {
     use std::path::{Path, PathBuf};
-    use schemsearch_files::Schematic;
+    use schemsearch_files::SpongeV2Schematic;
     use crate::{convert_to_search_space, unwrap_palette};
 
     //#[test]
     pub fn test() {
-        let schematic = Schematic::load(&PathBuf::from("../tests/Pattern.schem")).unwrap();
+        let schematic = SpongeV2Schematic::load(&PathBuf::from("../tests/Pattern.schem")).unwrap();
         dbg!(convert_to_search_space(&schematic, &unwrap_palette(&schematic.palette)));
     }
 
     //#[test]
     pub fn test_2() {
-        let schematic = Schematic::load(&PathBuf::from("../tests/Pattern.schem")).unwrap();
-        let schematic2 = Schematic::load(&PathBuf::from("../tests/Random.schem")).unwrap();
+        let schematic = SpongeV2Schematic::load(&PathBuf::from("../tests/Pattern.schem")).unwrap();
+        let schematic2 = SpongeV2Schematic::load(&PathBuf::from("../tests/Random.schem")).unwrap();
         println!("{:?}", convert_to_search_space(&schematic2, &unwrap_palette(&schematic.palette)));
     }
 
     //#[test]
     pub fn test_big() {
-        let schematic = Schematic::load(&PathBuf::from("../tests/endstone.schem")).unwrap();
-        let schematic2 = Schematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
+        let schematic = SpongeV2Schematic::load(&PathBuf::from("../tests/endstone.schem")).unwrap();
+        let schematic2 = SpongeV2Schematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
         let _ = convert_to_search_space(&schematic2, &unwrap_palette(&schematic.palette));
     }
 }

View File

@@ -1,10 +1,11 @@
 [package]
 name = "schemsearch-files"
-version = "0.1.1"
+version = "0.1.5"
 edition = "2021"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
-hematite-nbt = "0.5.2"
-serde = "1.0.152"
+flate2 = "1.0.25"
+named-binary-tag = "0.6"

View File

@@ -15,81 +15,176 @@
  * along with this program. If not, see <https://www.gnu.org/licenses/>.
  */
 
+use std::collections::hash_map::HashMap;
 use std::io::Read;
 use std::path::PathBuf;
-use nbt::{Map, Value};
-use serde::{Deserialize, Deserializer, Serialize};
+use nbt::{CompoundTag, Tag};
 
-#[derive(Serialize, Deserialize, Debug)]
-pub struct Schematic {
-    #[serde(rename = "Version")]
-    pub version: i32,
-    #[serde(rename = "DataVersion")]
+#[derive(Clone, Debug)]
+pub struct SpongeSchematic {
     pub data_version: i32,
-    #[serde(rename = "Metadata")]
-    pub metadata: Map<String, Value>,
+    pub metadata: CompoundTag,
-    #[serde(rename = "Width")]
     pub width: u16,
-    #[serde(rename = "Height")]
     pub height: u16,
-    #[serde(rename = "Length")]
     pub length: u16,
-    #[serde(rename = "Offset")]
     pub offset: [i32; 3],
-    #[serde(rename = "PaletteMax")]
     pub palette_max: i32,
-    #[serde(rename = "Palette")]
-    pub palette: Map<String, i32>,
+    pub palette: HashMap<String, i32>,
-    #[serde(rename = "BlockData", deserialize_with = "read_blockdata")]
     pub block_data: Vec<i32>,
-    #[serde(rename = "BlockEntities")]
     pub block_entities: Vec<BlockEntity>,
-    #[serde(rename = "Entities")]
     pub entities: Option<Vec<Entity>>,
 }
 
-fn read_blockdata<'de, D>(deserializer: D) -> Result<Vec<i32>, D::Error>
-    where
-        D: Deserializer<'de>,
-{
-    let s: Vec<i8> = Deserialize::deserialize(deserializer)?;
-    Ok(read_varint_array(&s))
+#[derive(Clone, Debug)]
+pub struct BlockContainer {
+    pub palette: HashMap<String, i32>,
+    pub block_data: Vec<i32>,
+    pub block_entities: Vec<BlockEntity>,
 }
 
-#[derive(Serialize, Deserialize, Debug, Clone)]
+#[derive(Debug, Clone)]
 pub struct BlockEntity {
-    #[serde(rename = "Id")]
     pub id: String,
-    #[serde(rename = "Pos")]
     pub pos: [i32; 3],
 }
 
-#[derive(Serialize, Deserialize, Debug, Clone)]
+#[derive(Debug, Clone)]
+pub struct BlockEntityV3 {
+    pub id: String,
+    pub pos: [i32; 3],
+    pub data: HashMap<String, Tag>,
+}
+
+#[derive(Debug, Clone)]
 pub struct Entity {
-    #[serde(rename = "Id")]
     pub id: String,
-    #[serde(rename = "Pos")]
     pub pos: [i32; 3],
 }
 
-impl Schematic {
-    pub fn load_data<R>(data: R) -> Result<Schematic, String> where R: Read {
-        let schematic: Schematic = match nbt::from_gzip_reader(data) {
-            Ok(schem) => schem,
-            Err(e) => return Err(format!("Failed to parse schematic: {}", e))
-        };
-        Ok(schematic)
-    }
-
-    pub fn load(path: &PathBuf) -> Result<Schematic, String> {
-        let file = match std::fs::File::open(path) {
-            Ok(x) => x,
-            Err(_) => return Err(format!("Failed to open file: {}", path.to_str().unwrap()))
-        };
-        Schematic::load_data(file)
-    }
-}
+impl SpongeSchematic {
+    pub fn load_data<R>(data: &mut R) -> Result<SpongeSchematic, String> where R: Read {
+        let nbt: CompoundTag = nbt::decode::read_gzip_compound_tag(data).map_err(|e| e.to_string())?;
+        let version = nbt.get_i32("Version").unwrap_or_else(|_| {
+            return if nbt.contains_key("Blocks") {
+                3
+            } else if nbt.contains_key("BlockEntities") {
+                2
+            } else if nbt.contains_key("TileEntities") {
+                1
+            } else {
+                -1
+            };
+        });
+
+        match version {
+            1 => SpongeSchematic::from_nbt_1(nbt),
+            2 => SpongeSchematic::from_nbt_2(nbt),
+            3 => SpongeSchematic::from_nbt_3(nbt),
+            _ => Err("Invalid schematic: Unknown Version".to_string()),
+        }
+    }
+
+    pub fn load(path: &PathBuf) -> Result<SpongeSchematic, String> {
+        let mut file = std::fs::File::open(path).map_err(|e| e.to_string())?;
+        Self::load_data(&mut file)
+    }
+
+    pub fn from_nbt_1(nbt: CompoundTag) -> Result<Self, String> {
+        Ok(Self {
+            data_version: 0,
+            metadata: nbt.get_compound_tag("Metadata").map_err(|e| e.to_string())?.clone(),
+            width: nbt.get_i16("Width").map_err(|e| e.to_string())? as u16,
+            height: nbt.get_i16("Height").map_err(|e| e.to_string())? as u16,
+            length: nbt.get_i16("Length").map_err(|e| e.to_string())? as u16,
+            offset: read_offset(nbt.get_i32_vec("Offset").map_err(|e| e.to_string())?)?,
+            palette_max: nbt.get_i32("PaletteMax").map_err(|e| e.to_string())?,
+            palette: read_palette(nbt.get_compound_tag("Palette").map_err(|e| e.to_string())?),
+            block_data: read_blocks(nbt.get_i8_vec("BlockData").map_err(|e| e.to_string())?),
+            block_entities: read_tile_entities(nbt.get_compound_tag_vec("TileEntities").unwrap_or_else(|_| vec![]))?,
+            entities: None,
+        })
+    }
+
+    pub fn from_nbt_2(nbt: CompoundTag) -> Result<Self, String> {
+        Ok(Self{
+            data_version: nbt.get_i32("DataVersion").map_err(|e| e.to_string())?,
+            metadata: nbt.get_compound_tag("Metadata").map_err(|e| e.to_string())?.clone(),
+            width: nbt.get_i16("Width").map_err(|e| e.to_string())? as u16,
+            height: nbt.get_i16("Height").map_err(|e| e.to_string())? as u16,
+            length: nbt.get_i16("Length").map_err(|e| e.to_string())? as u16,
+            offset: read_offset(nbt.get_i32_vec("Offset").map_err(|e| e.to_string())?)?,
+            palette_max: nbt.get_i32("PaletteMax").map_err(|e| e.to_string())?,
+            palette: read_palette(nbt.get_compound_tag("Palette").map_err(|e| e.to_string())?),
+            block_data: read_blocks(nbt.get_i8_vec("BlockData").map_err(|e| e.to_string())?),
+            block_entities: read_tile_entities(nbt.get_compound_tag_vec("BlockEntities").unwrap_or_else(|_| vec![]))?,
+            entities: None,
+        })
+    }
+
+    pub fn from_nbt_3(nbt: CompoundTag) -> Result<Self, String> {
+        let blocks = nbt.get_compound_tag("Blocks").map_err(|e| e.to_string())?;
+        Ok(Self{
+            data_version: nbt.get_i32("DataVersion").map_err(|e| e.to_string())?,
+            metadata: nbt.get_compound_tag("Metadata").map_err(|e| e.to_string())?.clone(),
+            width: nbt.get_i16("Width").map_err(|e| e.to_string())? as u16,
+            height: nbt.get_i16("Height").map_err(|e| e.to_string())? as u16,
+            length: nbt.get_i16("Length").map_err(|e| e.to_string())? as u16,
+            offset: read_offset(nbt.get_i32_vec("Offset").map_err(|e| e.to_string())?)?,
+            palette_max: compute_palette_max(blocks.get_compound_tag("Palette").map_err(|e| e.to_string())?),
+            palette: read_palette(blocks.get_compound_tag("Palette").map_err(|e| e.to_string())?),
+            block_data: read_blocks(blocks.get_i8_vec("BlockData").map_err(|e| e.to_string())?),
+            block_entities: read_tile_entities(blocks.get_compound_tag_vec("BlockEntities").unwrap_or_else(|_| vec![]))?,
+            entities: None,
+        })
+    }
+}
+
+fn read_tile_entities(tag: Vec<&CompoundTag>) -> Result<Vec<BlockEntity>, String> {
+    let mut tile_entities = Vec::new();
+    for t in tag {
+        tile_entities.push(BlockEntity {
+            id: t.get_str("Id").map_err(|e| e.to_string())?.to_string(),
+            pos: read_offset(t.get("Pos").map_err(|e| e.to_string())?)?,
+        });
+    }
+    Ok(tile_entities)
+}
+
+#[inline]
+fn read_offset(offset: &Vec<i32>) -> Result<[i32; 3], String> {
+    match offset.len() {
+        3 => Ok([offset[0], offset[1], offset[2]]),
+        _ => Err("Invalid schematic: read_offset wrong length".to_string()),
+    }
+}
+
+#[inline]
+fn read_palette(p: &CompoundTag) -> HashMap<String, i32> {
+    let mut palette = HashMap::new();
+    for (key, value) in p.iter() {
+        match value {
+            Tag::Int(n) => { palette.insert(key.clone(), *n); },
+            _ => {},
+        };
+    }
+    palette
+}
+
+#[inline]
+fn compute_palette_max(palette: &CompoundTag) -> i32 {
+    palette.iter().map(|(_, v)| v).filter_map(|v| match v {
+        Tag::Int(n) => Some(*n),
+        _ => None,
+    }).max().unwrap_or(0)
+}
+
+#[inline]
+fn read_blocks(blockdata: &Vec<i8>) -> Vec<i32> {
+    read_varint_array(blockdata)
+}
+
+#[inline]
 pub fn read_varint_array(read: &Vec<i8>) -> Vec<i32> {
     let mut data = Vec::new();
     let mut value: i32 = 0;

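The hunk above ends inside read_varint_array, whose body is not shown. Sponge schematics encode BlockData as a VarInt stream, so the function presumably performs the usual 7-bit/continuation-bit decoding; the following standalone sketch is an assumption about that elided body, not a copy of it:

// Decodes Minecraft/protobuf-style VarInts: 7 data bits per byte,
// high bit set on every byte except the last byte of a value.
pub fn read_varint_array(read: &Vec<i8>) -> Vec<i32> {
    let mut data = Vec::new();
    let mut value: i32 = 0;
    let mut shift = 0u32;
    for byte in read {
        let byte = *byte as u8;
        value |= ((byte & 0x7F) as i32) << shift;
        if byte & 0x80 == 0 {
            // High bit clear: this value is complete.
            data.push(value);
            value = 0;
            shift = 0;
        } else {
            shift += 7;
        }
    }
    data
}

fn main() {
    // 0x80 0x01 encodes 128; 0x05 encodes 5.
    let encoded: Vec<i8> = vec![-128, 1, 5];
    assert_eq!(read_varint_array(&encoded), vec![128, 5]);
}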
View File

@@ -1,6 +1,6 @@
 [package]
 name = "schemsearch-java"
-version = "0.1.1"
+version = "0.1.3"
 edition = "2021"
 license = "AGPL-3.0-or-later"

View File

@@ -21,7 +21,7 @@ use jni::JNIEnv;
 use jni::objects::{JClass, JString};
 use jni::sys::jstring;
-use schemsearch_files::Schematic;
+use schemsearch_files::SpongeV2Schematic;
 use schemsearch_lib::{search, SearchBehavior};
 
 #[no_mangle]
@@ -32,8 +32,8 @@ pub extern "system" fn Java_SchemSearch_search<'local>(mut env: JNIEnv<'local>,
                                                        pattern_path: JString<'local>) -> jstring {
     let schematic_path: String = env.get_string(&schematic_path).expect("Couldn't get java string!").into();
     let pattern_path: String = env.get_string(&pattern_path).expect("Couldn't get java string!").into();
-    let schematic = Schematic::load(&PathBuf::from(&schematic_path)).unwrap();
-    let pattern = Schematic::load(&PathBuf::from(&pattern_path)).unwrap();
+    let schematic = SpongeV2Schematic::load(&PathBuf::from(&schematic_path)).unwrap();
+    let pattern = SpongeV2Schematic::load(&PathBuf::from(&pattern_path)).unwrap();
     let matches = search(schematic, &pattern, SearchBehavior {
         ignore_block_data: true,

8
schemsearch-lib/Cargo.toml Normal file → Executable file

@@ -1,12 +1,14 @@
 [package]
 name = "schemsearch-lib"
-version = "0.1.1"
+version = "0.1.7"
 edition = "2021"
 license = "AGPL-3.0-or-later"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
-hematite-nbt = "0.5.2"
-serde = "1.0.152"
+serde = { version = "1.0.160", features = ["derive"] }
 schemsearch-files = { path = "../schemsearch-files" }
+named-binary-tag = "0.6"
+libmath = "0.2.1"
+lazy_static = "1.4.0"

163
schemsearch-lib/src/blocks.txt Executable file

@@ -0,0 +1,163 @@
oak_sign
oak_wall_sign
oak_hanging_sign
oak_wall_hanging_sign
birch_sign
birch_wall_sign
birch_hanging_sign
birch_wall_hanging_sign
spruce_sign
spruce_wall_sign
spruce_hanging_sign
spruce_wall_hanging_sign
jungle_sign
jungle_wall_sign
jungle_hanging_sign
jungle_wall_hanging_sign
dark_oak_sign
dark_oak_wall_sign
dark_oak_hanging_sign
dark_oak_wall_hanging_sign
acacia_sign
acacia_wall_sign
acacia_hanging_sign
acacia_wall_hanging_sign
mangrove_sign
mangrove_wall_sign
mangrove_hanging_sign
mangrove_wall_hanging_sign
cherry_sign
cherry_wall_sign
cherry_hanging_sign
cherry_wall_hanging_sign
bamboo_sign
bamboo_wall_sign
bamboo_hanging_sign
bamboo_wall_hanging_sign
warped_sign
warped_wall_sign
warped_hanging_sign
warped_wall_hanging_sign
crimson_sign
crimson_wall_sign
crimson_hanging_sign
crimson_wall_hanging_sign
suspicious_gravel
suspicious_sand
white_banner
light_gray_banner
gray_banner
black_banner
brown_banner
red_banner
orange_banner
yellow_banner
lime_banner
green_banner
cyan_banner
light_blue_banner
blue_banner
purple_banner
magenta_banner
pink_banner
white_wall_banner
light_gray_wall_banner
gray_wall_banner
black_wall_banner
brown_wall_banner
red_wall_banner
orange_wall_banner
yellow_wall_banner
lime_wall_banner
green_wall_banner
cyan_wall_banner
light_blue_wall_banner
blue_wall_banner
purple_wall_banner
magenta_wall_banner
pink_wall_banner
white_bed
light_gray_bed
gray_bed
black_bed
brown_bed
red_bed
orange_bed
yellow_bed
lime_bed
green_bed
cyan_bed
light_blue_bed
blue_bed
purple_bed
magenta_bed
pink_bed
shulker_box
white_shulker_box
light_gray_shulker_box
gray_shulker_box
black_shulker_box
brown_shulker_box
red_shulker_box
orange_shulker_box
yellow_shulker_box
lime_shulker_box
green_shulker_box
cyan_shulker_box
light_blue_shulker_box
blue_shulker_box
purple_shulker_box
magenta_shulker_box
pink_shulker_box
furnace
blast_furnace
smoker
chest
trapped_chest
ender_chest
enchanting_table
barrel
lectern
jukebox
bell
brewing_stand
bee_nest
beehive
decorated_pot
beacon
conduit
campfire
soul_campfire
redstone_comparator
hopper
dispenser
dropper
moving_piston
daylight_detector
sculk_sensor
calibrated_sculk_sensor
sculk_catalyst
sculk_shrieker
player_head
player_wall_head
wither_skeleton_skull
wither_skeleton_wall_skull
zombie_head
zombie_wall_head
skeleton_skull
skeleton_wall_skull
creeper_head
creeper_wall_head
piglin_head
piglin_wall_head
dragon_head
dragon_wall_head
chiseled_bookshelf
command_block
chain_command_block
repeating_command_block
structure_block
jigsaw_block
end_portal
end_gateway
monster_spawner

148
schemsearch-lib/src/lib.rs Normal file → Executable file

@@ -16,11 +16,10 @@
  */
 
 pub mod pattern_mapper;
+pub mod search;
+pub mod nbt_search;
 
-use serde::{Deserialize, Serialize};
-use pattern_mapper::match_palette;
-use schemsearch_files::Schematic;
-use crate::pattern_mapper::match_palette_adapt;
+use serde::{Serialize, Deserialize};
 
 #[derive(Debug, Clone, Copy, Deserialize, Serialize)]
 pub struct SearchBehavior {
@@ -30,81 +29,10 @@ pub struct SearchBehavior {
     pub air_as_any: bool,
     pub ignore_entities: bool,
     pub threshold: f32,
+    pub invalid_nbt: bool,
 }
 
-pub fn search(
-    schem: Schematic,
-    pattern_schem: &Schematic,
-    search_behavior: SearchBehavior,
-) -> Vec<Match> {
-    if schem.width < pattern_schem.width || schem.height < pattern_schem.height || schem.length < pattern_schem.length {
-        return vec![];
-    }
-    if pattern_schem.palette.len() > schem.palette.len() {
-        return vec![];
-    }
-    let pattern_schem = match_palette(&schem, &pattern_schem, search_behavior.ignore_block_data);
-    let mut matches: Vec<Match> = Vec::new();
-    let pattern_data = pattern_schem.block_data.as_slice();
-    let schem_data = if search_behavior.ignore_block_data {
-        match_palette_adapt(&schem, &pattern_schem.palette, search_behavior.ignore_block_data)
-    } else {
-        schem.block_data
-    };
-    let schem_data = schem_data.as_slice();
-    let air_id = if search_behavior.ignore_air || search_behavior.air_as_any { pattern_schem.palette.get("minecraft:air").unwrap_or(&-1) } else { &-1};
-    let pattern_blocks = pattern_data.len() as f32;
-    let pattern_width = pattern_schem.width as usize;
-    let pattern_height = pattern_schem.height as usize;
-    let pattern_length = pattern_schem.length as usize;
-    let schem_width = schem.width as usize;
-    let schem_height = schem.height as usize;
-    let schem_length = schem.length as usize;
-    for y in 0..=schem_height - pattern_height {
-        for z in 0..=schem_length - pattern_length {
-            for x in 0..=schem_width - pattern_width {
-                let mut matching = 0;
-                for j in 0..pattern_height {
-                    for k in 0..pattern_length {
-                        for i in 0..pattern_width {
-                            let index = (x + i) + schem_width * ((z + k) + (y + j) * schem_length);
-                            let pattern_index = i + pattern_width * (k + j * pattern_length);
-                            let data = unsafe { schem_data.get_unchecked(index) };
-                            let pattern_data = unsafe { pattern_data.get_unchecked(pattern_index) };
-                            if *data == *pattern_data || (search_behavior.ignore_air && *data == *air_id) || (search_behavior.air_as_any && *pattern_data == *air_id) {
-                                matching += 1;
-                            }
-                        }
-                    }
-                }
-                let matching_percent = matching as f32 / pattern_blocks;
-                if matching_percent >= search_behavior.threshold {
-                    matches.push(Match {
-                        x: x as u16,
-                        y: y as u16,
-                        z: z as u16,
-                        percent: matching_percent,
-                    });
-                }
-            }
-        }
-    }
-    return matches;
-}
-
-#[derive(Debug, Clone, Copy, Deserialize, Serialize)]
+#[derive(Debug, Clone, Copy, Default, Deserialize, Serialize)]
 pub struct Match {
     pub x: u16,
     pub y: u16,
@@ -112,17 +40,6 @@ pub struct Match {
     pub percent: f32,
 }
 
-impl Default for Match {
-    fn default() -> Self {
-        Self {
-            x: 0,
-            y: 0,
-            z: 0,
-            percent: 0.0,
-        }
-    }
-}
-
 #[inline]
 pub fn normalize_data(data: &str, ignore_data: bool) -> &str {
     if ignore_data {
@@ -132,42 +49,34 @@ pub fn normalize_data(data: &str, ignore_data: bool) -> &str {
     }
 }
 
-pub fn parse_schematic(data: &Vec<u8>) -> Schematic {
-    if data[0] == 0x1f && data[1] == 0x8b {
-        // gzip
-        nbt::from_gzip_reader(data.as_slice()).unwrap()
-    } else {
-        // uncompressed
-        nbt::from_reader(data.as_slice()).unwrap()
-    }
-}
-
 #[allow(unused_imports)]
 #[cfg(test)]
 mod tests {
     use std::path::{Path, PathBuf};
-    use schemsearch_files::Schematic;
-    use crate::pattern_mapper::strip_data;
+    use schemsearch_files::SpongeSchematic;
+    use crate::pattern_mapper::{match_palette, strip_data};
+    use crate::search::search;
     use super::*;
 
     #[test]
     fn read_schematic() {
-        let schematic = Schematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
+        let schematic = SpongeSchematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
         assert_eq!(schematic.width as usize * schematic.height as usize * schematic.length as usize, schematic.block_data.len());
         assert_eq!(schematic.palette_max, schematic.palette.len() as i32);
     }
 
     #[test]
     fn test_parse_function() {
-        let file = std::fs::File::open("../tests/simple.schem").expect("Failed to open file");
-        let schematic: Schematic = parse_schematic(&std::io::Read::bytes(file).map(|b| b.unwrap()).collect());
+        let schematic = SpongeSchematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
        assert_eq!(schematic.width as usize * schematic.height as usize * schematic.length as usize, schematic.block_data.len());
         assert_eq!(schematic.palette_max, schematic.palette.len() as i32);
     }
 
     #[test]
     fn test_strip_schem() {
-        let schematic = Schematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
+        let schematic = SpongeSchematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
         let stripped = strip_data(&schematic);
 
         assert_eq!(stripped.palette.keys().any(|k| k.contains('[')), false);
@@ -175,24 +84,24 @@ mod tests {
 
     #[test]
     fn test_match_palette() {
-        let schematic = Schematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
-        let endstone = Schematic::load(&PathBuf::from("../tests/endstone.schem")).unwrap();
+        let schematic = SpongeSchematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
+        let endstone = SpongeSchematic::load(&PathBuf::from("../tests/endstone.schem")).unwrap();
         let _ = match_palette(&schematic, &endstone, true);
     }
 
     #[test]
     fn test_match_palette_ignore_data() {
-        let schematic = Schematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
-        let endstone = Schematic::load(&PathBuf::from("../tests/endstone.schem")).unwrap();
+        let schematic = SpongeSchematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
+        let endstone = SpongeSchematic::load(&PathBuf::from("../tests/endstone.schem")).unwrap();
         let _ = match_palette(&schematic, &endstone, false);
     }
 
     #[test]
     pub fn test_big_search() {
-        let schematic = Schematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
-        let endstone = Schematic::load(&PathBuf::from("../tests/endstone.schem")).unwrap();
+        let schematic = SpongeSchematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
+        let endstone = SpongeSchematic::load(&PathBuf::from("../tests/endstone.schem")).unwrap();
         let _ = search(schematic, &endstone, SearchBehavior {
             ignore_block_data: true,
@@ -200,14 +109,15 @@ mod tests {
             ignore_entities: true,
             ignore_air: false,
             air_as_any: false,
-            threshold: 0.9
+            threshold: 0.9,
+            invalid_nbt: false
         });
     }
 
     #[test]
     pub fn test_search() {
-        let schematic = Schematic::load(&PathBuf::from("../tests/Random.schem")).unwrap();
-        let pattern = Schematic::load(&PathBuf::from("../tests/Pattern.schem")).unwrap();
+        let schematic = SpongeSchematic::load(&PathBuf::from("../tests/Random.schem")).unwrap();
+        let pattern = SpongeSchematic::load(&PathBuf::from("../tests/Pattern.schem")).unwrap();
         let matches = search(schematic, &pattern, SearchBehavior {
             ignore_block_data: true,
@@ -215,10 +125,10 @@ mod tests {
             ignore_entities: true,
             ignore_air: false,
             air_as_any: false,
-            threshold: 0.9
+            threshold: 0.9,
+            invalid_nbt: false
         });
 
-        println!("{:?}", matches);
         assert_eq!(matches.len(), 1);
         assert_eq!(matches[0].x, 1);
         assert_eq!(matches[0].y, 0);
@@ -228,8 +138,8 @@ mod tests {
 
     #[test]
     pub fn test_search_ws() {
-        let schematic = Schematic::load(&PathBuf::from("../tests/warships/GreyFly-by-Bosslar.schem")).unwrap();
-        let pattern = Schematic::load(&PathBuf::from("../tests/gray_castle_complex.schem")).unwrap();
+        let schematic = SpongeSchematic::load(&PathBuf::from("../tests/warships/GreyFly-by-Bosslar.schem")).unwrap();
+        let pattern = SpongeSchematic::load(&PathBuf::from("../tests/gray_castle_complex.schem")).unwrap();
         let matches = search(schematic, &pattern, SearchBehavior {
             ignore_block_data: false,
@@ -237,10 +147,10 @@ mod tests {
             ignore_entities: false,
             ignore_air: false,
             air_as_any: false,
-            threshold: 0.9
+            threshold: 0.9,
+            invalid_nbt: false
         });
 
-        println!("{:?}", matches);
         assert_eq!(matches.len(), 1);
     }
 }

110
schemsearch-lib/src/nbt_search.rs Executable file

@@ -0,0 +1,110 @@
use std::borrow::ToOwned;
use std::collections::HashSet;
use std::iter::Iterator;
use lazy_static::lazy_static;
use schemsearch_files::SpongeSchematic;

const NBT_BLOCKS: &str = include_str!("blocks.txt");

lazy_static! {
    static ref NBT_BLOCKS_SET: HashSet<String> = {
        NBT_BLOCKS.lines().map(|x| format!("minecraft:{}", x)).collect()
    };
}

pub fn has_invalid_nbt(schem: SpongeSchematic) -> bool {
    if schem.block_entities.is_empty() && schem.palette.keys().any(|v| NBT_BLOCKS_SET.contains(v)) {
        return true;
    }
    let nbt_blocks = schem.palette.iter().filter(|(k, _)| NBT_BLOCKS_SET.contains(k.to_owned())).map(|(_, v)| *v).collect::<HashSet<i32>>();
    for (i, block_entity) in schem.block_data.iter().enumerate() {
        if nbt_blocks.contains(&*block_entity) {
            // i = x + z * Width + y * Width * Length
            let x = i % schem.width as usize;
            let z = (i / schem.width as usize) % schem.length as usize;
            let y = i / (schem.width as usize * schem.length as usize);
            if schem.block_entities.iter().any(|e| !e.pos.eq(&[x as i32, y as i32, z as i32])) {
                return true;
            }
        }
    }
    return false;
}

#[allow(unused_imports)]
#[cfg(test)]
mod tests {
    use nbt::CompoundTag;
    use schemsearch_files::{BlockEntity, SpongeSchematic};
    use super::*;

    #[test]
    fn test_has_invalid_nbt() {
        let schem = SpongeSchematic {
            data_version: 1,
            metadata: CompoundTag::new(),
            width: 0,
            height: 0,
            length: 0,
            offset: [0, 0, 0],
            palette_max: 1,
            palette: vec![("minecraft:chest".to_owned(), 1)].into_iter().collect(),
            block_data: vec![1],
            block_entities: vec![],
            entities: None,
        };
        assert_eq!(has_invalid_nbt(schem), true);
    }

    #[test]
    fn test_has_invalid_nbt_2() {
        let schem = SpongeSchematic {
            data_version: 1,
            metadata: CompoundTag::new(),
            width: 1,
            height: 1,
            length: 1,
            offset: [0, 0, 0],
            palette_max: 1,
            palette: vec![("minecraft:chest".to_owned(), 1)].into_iter().collect(),
            block_data: vec![1],
            block_entities: vec![
                BlockEntity {
                    id: "minecraft:chest".to_owned(),
                    pos: [0, 0, 0],
                }
            ],
            entities: None,
        };
        assert_eq!(has_invalid_nbt(schem), false);
    }

    #[test]
    fn test_has_invalid_nbt_3() {
        let schem = SpongeSchematic {
            data_version: 1,
            metadata: CompoundTag::new(),
            width: 2,
            height: 1,
            length: 1,
            offset: [0, 0, 0],
            palette_max: 1,
            palette: vec![("minecraft:chest".to_owned(), 1), ("minecraft:stone".to_owned(), 2)].into_iter().collect(),
            block_data: vec![1, 2],
            block_entities: vec![
                BlockEntity {
                    id: "minecraft:chest".to_owned(),
                    pos: [1, 0, 0],
                }
            ],
            entities: None,
        };
        assert_eq!(has_invalid_nbt(schem), true);
    }
}

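For context, has_invalid_nbt is what the CLI's --invalid-nbt path calls once per schematic. A short usage sketch, assuming the schemsearch_files and schemsearch_lib crates from this repository; the file path below is only a placeholder:

use std::path::PathBuf;

use schemsearch_files::SpongeSchematic;
use schemsearch_lib::nbt_search::has_invalid_nbt;

fn main() -> Result<(), String> {
    // Placeholder path; any Sponge .schem file works here.
    let schem = SpongeSchematic::load(&PathBuf::from("tests/simple.schem"))?;
    if has_invalid_nbt(schem) {
        println!("schematic references tile-entity blocks without matching NBT");
    } else {
        println!("schematic NBT looks consistent");
    }
    Ok(())
}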
View File

@@ -15,11 +15,12 @@
  * along with this program. If not, see <https://www.gnu.org/licenses/>.
  */
 
-use nbt::Map;
-use schemsearch_files::Schematic;
+use std::collections::HashMap;
+use nbt::CompoundTag;
+use schemsearch_files::SpongeSchematic;
 use crate::normalize_data;
 
-fn create_reverse_palette(schem: &Schematic) -> Vec<&str> {
+fn create_reverse_palette(schem: &SpongeSchematic) -> Vec<&str> {
     let mut reverse_palette = Vec::with_capacity(schem.palette_max as usize);
     (0..schem.palette_max).for_each(|_| reverse_palette.push(""));
     for (key, value) in schem.palette.iter() {
@@ -28,15 +29,15 @@ fn create_reverse_palette(schem: &Schematic) -> Vec<&str> {
     reverse_palette
 }
 
-pub fn strip_data(schem: &Schematic) -> Schematic {
+pub fn strip_data(schem: &SpongeSchematic) -> SpongeSchematic {
     let mut data: Vec<i32> = Vec::new();
 
-    let mut palette: Map<String, i32> = Map::new();
+    let mut palette: HashMap<String, i32> = HashMap::new();
     let mut palette_max: i32 = 0;
     let reverse_palette = create_reverse_palette(schem);
 
     for block in schem.block_data.iter() {
-        let block_name = reverse_palette[*block as usize].clone();
+        let block_name = reverse_palette[*block as usize];
         let block_name = block_name.split('[').next().unwrap().to_string();
 
         let entry = palette.entry(block_name).or_insert_with(|| {
@@ -47,9 +48,8 @@ pub fn strip_data(schem: &Schematic) -> Schematic {
         data.push(*entry);
     }
 
-    Schematic {
-        version: schem.version,
-        data_version: schem.data_version,
+    SpongeSchematic {
+        data_version: 1,
         palette,
         palette_max,
         block_data: data,
@@ -57,17 +57,17 @@
         height: schem.height,
         length: schem.length,
         width: schem.width,
-        metadata: schem.metadata.clone(),
-        offset: schem.offset.clone(),
+        metadata: CompoundTag::new(),
+        offset: [0; 3],
         entities: None,
     }
 }
 
-pub fn match_palette_adapt(schem: &Schematic, matching_palette: &Map<String, i32>, ignore_data: bool) -> Vec<i32> {
-    let mut data: Vec<i32> = Vec::new();
+pub fn match_palette_adapt(schem: &SpongeSchematic, matching_palette: &HashMap<String, i32>, ignore_data: bool) -> Vec<i32> {
+    let mut data = Vec::with_capacity(schem.block_data.len());
     let reverse_palette = create_reverse_palette(schem);
 
-    for x in &schem.block_data {
+    for x in schem.block_data.as_slice().iter() {
         let blockname = reverse_palette[*x as usize];
         let blockname = if ignore_data { normalize_data(blockname, ignore_data) } else { blockname };
         let block_id = match matching_palette.get(&*blockname) {
@@ -81,10 +81,10 @@ pub fn match_palette_adapt(schem: &Schematic, matching_palette: &Map<String, i32
 }
 
 pub fn match_palette(
-    schem: &Schematic,
-    pattern: &Schematic,
+    schem: &SpongeSchematic,
+    pattern: &SpongeSchematic,
     ignore_data: bool,
-) -> Schematic {
+) -> SpongeSchematic {
     if ignore_data {
         match_palette_internal(&strip_data(schem), &strip_data(pattern), ignore_data)
     } else {
@@ -93,24 +93,23 @@ pub fn match_palette(
 }
 
 fn match_palette_internal(
-    schem: &Schematic,
-    pattern: &Schematic,
+    schem: &SpongeSchematic,
+    pattern: &SpongeSchematic,
     ignore_data: bool,
-) -> Schematic {
+) -> SpongeSchematic {
     let data_pattern: Vec<i32> = match_palette_adapt(&pattern, &schem.palette, ignore_data);
 
-    Schematic {
-        version: pattern.version.clone(),
-        data_version: pattern.data_version.clone(),
+    SpongeSchematic {
+        data_version: 0,
         palette: schem.palette.clone(),
         palette_max: schem.palette_max,
         block_data: data_pattern,
         block_entities: pattern.block_entities.clone(),
-        height: pattern.height.clone(),
-        length: pattern.length.clone(),
-        width: pattern.width.clone(),
+        height: pattern.height,
+        length: pattern.length,
+        width: pattern.width,
-        metadata: pattern.metadata.clone(),
-        offset: pattern.offset.clone(),
+        metadata: CompoundTag::new(),
+        offset: [0; 3],
         entities: None,
     }
 }

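strip_data reduces every palette key to its bare block id by cutting the key at the first '[' character. A standalone illustration of that normalization rule (it does not call the crate; the sample keys are arbitrary):

fn main() {
    // The same rule strip_data applies to palette keys: drop block-state properties.
    let keys = [
        "minecraft:oak_log[axis=y]",
        "minecraft:chest[facing=north,type=single]",
        "minecraft:stone",
    ];
    for key in keys {
        let stripped = key.split('[').next().unwrap();
        println!("{key} -> {stripped}");
    }
}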
87
schemsearch-lib/src/search.rs Executable file

@@ -0,0 +1,87 @@
use math::round::ceil;
use schemsearch_files::SpongeSchematic;
use crate::{Match, SearchBehavior};
use crate::pattern_mapper::{match_palette, match_palette_adapt};

pub fn search(
    schem: SpongeSchematic,
    pattern_schem: &SpongeSchematic,
    search_behavior: SearchBehavior,
) -> Vec<Match> {
    if schem.width < pattern_schem.width || schem.height < pattern_schem.height || schem.length < pattern_schem.length {
        return Vec::new();
    }

    if pattern_schem.palette.len() > schem.palette.len() {
        return Vec::new();
    }

    let pattern_schem = match_palette(&schem, &pattern_schem, search_behavior.ignore_block_data);

    let mut matches: Vec<Match> = Vec::with_capacity(4);

    let pattern_data = pattern_schem.block_data.as_ptr();

    let schem_data = if search_behavior.ignore_block_data {
        match_palette_adapt(&schem, &pattern_schem.palette, search_behavior.ignore_block_data)
    } else {
        schem.block_data
    };
    let schem_data = schem_data.as_ptr();

    let air_id = if search_behavior.ignore_air || search_behavior.air_as_any { pattern_schem.palette.get("minecraft:air").unwrap_or(&-1) } else { &-1};

    let pattern_blocks = pattern_schem.block_data.len() as f32;
    let i_pattern_blocks = pattern_blocks as i32;

    let pattern_width = pattern_schem.width as usize;
    let pattern_height = pattern_schem.height as usize;
    let pattern_length = pattern_schem.length as usize;

    let schem_width = schem.width as usize;
    let schem_height = schem.height as usize;
    let schem_length = schem.length as usize;

    let skip_amount = ceil((pattern_blocks * (1.0 - search_behavior.threshold)) as f64, 0) as i32;

    for y in 0..=schem_height - pattern_height {
        for z in 0..=schem_length - pattern_length {
            for x in 0..=schem_width - pattern_width {
                let mut not_matching = 0;
                'outer:
                for j in 0..pattern_height {
                    for k in 0..pattern_length {
                        'inner:
                        for i in 0..pattern_width {
                            let index = (x + i) + schem_width * ((z + k) + (y + j) * schem_length);
                            let pattern_index = i + pattern_width * (k + j * pattern_length);
                            let data = unsafe { *schem_data.add(index) };
                            let pattern_data = unsafe { *pattern_data.add(pattern_index) };
                            if (search_behavior.ignore_air && data != *air_id) || (search_behavior.air_as_any && pattern_data != *air_id) {
                                continue 'inner;
                            }
                            if data != pattern_data {
                                not_matching += 1;
                                if not_matching >= skip_amount {
                                    break 'outer;
                                }
                            }
                        }
                    }
                }
                if not_matching < skip_amount {
                    matches.push(Match {
                        x: x as u16,
                        y: y as u16,
                        z: z as u16,
                        percent: (i_pattern_blocks - not_matching) as f32 / pattern_blocks,
                    });
                }
            }
        }
    }

    return matches;
}

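The rewritten search works off a mismatch budget instead of counting matches: with threshold t and a pattern of N blocks, a window is abandoned as soon as ceil(N * (1 - t)) blocks differ. A small standalone check of that arithmetic, using plain f32/f64 rounding rather than the libmath ceil the crate imports; the concrete numbers are illustrative:

fn main() {
    let pattern_blocks = 64.0_f32; // e.g. a 4x4x4 pattern
    let threshold = 0.75_f32;

    // The mismatch budget: the same quantity the search derives with math::round::ceil.
    let skip_amount = ((pattern_blocks * (1.0 - threshold)) as f64).ceil() as i32;
    assert_eq!(skip_amount, 16); // the window is abandoned once 16 blocks disagree

    // A window with fewer mismatches than the budget is reported, with its score:
    let not_matching = 10;
    let percent = (pattern_blocks as i32 - not_matching) as f32 / pattern_blocks;
    assert!(not_matching < skip_amount);
    assert!((percent - 0.843_75).abs() < f32::EPSILON);
    println!("skip_amount = {skip_amount}, percent = {percent}");
}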
View File

@@ -1,13 +1,13 @@
 [package]
 name = "schemsearch-sql"
-version = "0.1.1"
+version = "0.1.3"
 edition = "2021"
 license = "AGPL-3.0-or-later"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
-sqlx = { version = "0.6", features = [ "runtime-async-std-native-tls" , "mysql" ] }
+sqlx = { version = "0.7", features = [ "runtime-async-std-native-tls" , "mysql" ] }
 schemsearch-lib = { path = "../schemsearch-lib" }
 schemsearch-files = { path = "../schemsearch-files" }