48 Commits

Author SHA1 Message Date
e3d0bad63c Update sqlx requirement from 0.7 to 0.8
Updates the requirements on [sqlx](https://github.com/launchbadge/sqlx) to permit the latest version.
- [Changelog](https://github.com/launchbadge/sqlx/blob/main/CHANGELOG.md)
- [Commits](https://github.com/launchbadge/sqlx/compare/v0.7.0...v0.8.0)

---
updated-dependencies:
- dependency-name: sqlx
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>
2024-07-29 10:34:27 +00:00
2a584e878f Fixing... 2024-04-27 22:30:29 +02:00
33f5fe03fe Merge pull request #11 from Chaoscaot/add-invalid-nbt-arg
🔧 Add invalid_nbt flag.
2024-04-27 21:55:13 +02:00
0e6f2c3f78 🔧 Add invalid_nbt flag. 2024-04-27 21:27:42 +02:00
82108d9e36 🛠️ Fix incorrect CSV format in OutputFormat::CSV. (#10) 2024-04-27 20:19:10 +02:00
d20940f89b Improve Performance 2023-08-20 15:37:23 +02:00
e3e6e9f759 Improve Performance 2023-08-09 09:22:24 +02:00
ccae2ba393 Merge pull request #9 from Chaoscaot/dependabot/cargo/sqlx-0.7
Update sqlx requirement from 0.6 to 0.7
2023-07-11 20:48:13 +02:00
6c6c95bedd Update sqlx requirement from 0.6 to 0.7
Updates the requirements on [sqlx](https://github.com/launchbadge/sqlx) to permit the latest version.
- [Changelog](https://github.com/launchbadge/sqlx/blob/main/CHANGELOG.md)
- [Commits](https://github.com/launchbadge/sqlx/compare/v0.6.0...v0.7.0)

---
updated-dependencies:
- dependency-name: sqlx
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-07-10 10:49:15 +00:00
582079c80d Bump Version 2023-05-23 20:10:41 +02:00
e25aeab065 Fix Broken Schematics Loading 2023-05-23 20:07:23 +02:00
aee3a80267 Reduce FLOPs 2023-05-01 11:32:40 +02:00
5107e04497 Update README.md 2023-04-28 00:28:34 +02:00
a357da2ce8 Fix Tests 2023-04-24 22:52:03 +02:00
eb84adb4a3 Reduce Boilerplate 2023-04-24 19:04:44 +02:00
25c4e97f71 Merge branch 'chaoscaot/support_sponge_v1_v3' 2023-04-23 12:19:22 +02:00
7d9e7f28a9 Fix SQL 2023-04-23 12:17:59 +02:00
ef1f8ed38f Merge pull request #8 from Chaoscaot/chaoscaot/support_sponge_v1_v3
Add Support for Sponge Schematic V1 and V3
2023-04-22 21:55:51 +02:00
4671f38591 Bump Version 2023-04-22 21:55:08 +02:00
5cff84538d Fix Performance 2023-04-22 21:29:18 +02:00
9a0b0535c6 remove Serde 2023-04-22 21:03:00 +02:00
a47c2f44bd Something™️ 2023-04-22 16:39:21 +02:00
246927d840 idk what im doing 2023-04-14 17:56:28 +02:00
d1a01dc0c1 Faster but not working 2023-04-13 23:49:39 +02:00
e03a805bdb Something Working :D 2023-04-13 23:16:12 +02:00
9cca860db3 Some new Ideas 2023-04-13 16:16:02 +02:00
80f5191ae8 Merge branch 'master' into chaoscaot/support_sponge_v1_v3 2023-04-13 14:33:59 +02:00
3f20cbc17f Create CODE_OF_CONDUCT.md 2023-04-13 00:25:42 +02:00
733aaa9e72 Update dependabot.yml 2023-04-13 00:21:24 +02:00
14866df17d Create dependabot.yml 2023-04-13 00:20:53 +02:00
00e3d6fd0f Fix Cache 2023-04-05 13:07:14 +02:00
fb8f935617 Fix Cache and Bump Version 2023-04-05 13:05:15 +02:00
2a112ac49c Add Output Limit 2023-04-05 02:43:28 +02:00
e7c1fd1ef7 Fixing Something? 2023-04-05 00:33:21 +02:00
80eeaad5d5 Add output for machines 2023-04-04 22:38:02 +02:00
64158cf45b Remove Timer from Progressbar 2023-04-04 21:44:43 +02:00
e4b26755ea Revert "Print Progressbar to stdout"
This reverts commit 5607dcc72c.
2023-04-04 17:34:12 +02:00
5607dcc72c Print Progressbar to stdout 2023-04-04 17:29:04 +02:00
5c9bcfc2ec Add SQL to Makefile 2023-04-04 16:31:48 +02:00
a1b5449f06 Some basic tests and basic impls 2023-04-04 12:07:33 +02:00
1df33249c4 Add Makefile for easier building 2023-04-04 00:36:40 +02:00
ef2755115c Fix tests 2023-04-01 11:14:44 +02:00
b32aac0aba Fix naming and Tests 2023-04-01 11:08:57 +02:00
a9a3e70aef Update Roadmap 2023-04-01 11:07:12 +02:00
c477a52f92 Slowdown ProgressBar and add Stderr as output 2023-04-01 11:02:49 +02:00
818de6be47 Abstractions 2023-04-01 10:30:25 +02:00
8f15b42146 Add Issue Templates 2023-03-21 18:31:50 +01:00
b8d912881d Fix SQL-Interface 2023-03-19 21:18:40 +01:00
28 changed files with 1049 additions and 291 deletions

40
.github/ISSUE_TEMPLATE/bug.yml vendored Normal file
View File

@@ -0,0 +1,40 @@
name: Bug Report
description: Create a report to fix a bug
labels: [bug]
title: "[BUG] <title>"
body:
- type: textarea
id: description
attributes:
label: Description
description: A clear and concise description of what the bug is.
validations:
required: true
- type: textarea
id: reproduction
attributes:
label: Reproduction
description: Steps to reproduce the behavior.
validations:
required: true
- type: textarea
id: expected-behavior
attributes:
label: Expected Behavior
description: A clear and concise description of what you expected to happen.
validations:
required: true
- type: textarea
id: actual-behavior
attributes:
label: Actual Behavior
description: A clear and concise description of what actually happened.
validations:
required: true
- type: textarea
id: additional-context
attributes:
label: Additional Context
description: Add any other context about the problem here.
validations:
required: false

35
.github/ISSUE_TEMPLATE/feature.yml vendored Normal file
View File

@@ -0,0 +1,35 @@
name: Feature Request
description: Suggest an idea for this project
title: "[FEATURE] <title>"
labels: [enhancement]
body:
- type: textarea
id: description
attributes:
label: Description
description: A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
validations:
required: true
- type: textarea
id: solution
attributes:
label: Proposed Solution
description: A clear and concise description of what you want to happen.
validations:
required: true
- type: textarea
id: alternatives
attributes:
label: Alternatives
description: A clear and concise description of any alternative solutions or features you've considered.
validations:
required: false
- type: textarea
id: additional-context
attributes:
label: Additional Context
description: Add any other context or screenshots about the feature request here.
validations:
required: false

6
.github/dependabot.yml vendored Normal file
View File

@@ -0,0 +1,6 @@
version: 2
updates:
- package-ecosystem: "cargo"
directory: "/"
schedule:
interval: "weekly"

View File

@@ -19,34 +19,38 @@ jobs:
- name: Cache Cargo modules
id: cache-cargo
uses: actions/cache@v3
env:
cache-name: cache-cargo-target-debug
continue-on-error: false
with:
path: target
key: ${{ runner.os }}-${{ env.cache-name }}-${{ hashFiles('**/Cargo.lock') }}
path: |
~/.cargo/registry
~/.cargo/git
key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
restore-keys: |
${{ runner.os }}-${{ env.cache-name }}-
${{ runner.os }}-cargo-
- name: Build
run: cargo build --verbose
run: make debug
- name: Run tests
run: cargo test --verbose -p schemsearch-lib
build-release:
needs:
- build
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Cache Cargo modules
id: cache-cargo
uses: actions/cache@v3
env:
cache-name: cache-cargo-target-release
continue-on-error: false
with:
path: target
key: ${{ runner.os }}-${{ env.cache-name }}-${{ hashFiles('**/Cargo.lock') }}
path: |
~/.cargo/registry
~/.cargo/git
key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
restore-keys: |
${{ runner.os }}-${{ env.cache-name }}-
${{ runner.os }}-cargo-
- name: Build
run: cargo build --verbose --release -p schemsearch-cli
run: make
- name: Upload a Build Artifact
uses: actions/upload-artifact@v3.1.2
with:

View File

@@ -22,15 +22,16 @@ jobs:
- name: Cache Cargo modules
id: cache-cargo
uses: actions/cache@v3
env:
cache-name: cache-cargo-target-release
continue-on-error: false
with:
path: target
key: ${{ runner.os }}-${{ env.cache-name }}-${{ hashFiles('**/Cargo.lock') }}
path: |
~/.cargo/registry
~/.cargo/git
key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
restore-keys: |
${{ runner.os }}-${{ env.cache-name }}-
${{ runner.os }}-cargo-
- name: Build
run: cargo build --verbose --release -p schemsearch-cli
run: make
- name: Create Tarball
if: ${{ matrix.os != 'windows-latest' }}
run: tar -czvf schemsearch-cli-${{ matrix.os }}.tar.gz -C target/release schemsearch-cli

128
CODE_OF_CONDUCT.md Normal file
View File

@@ -0,0 +1,128 @@
# Contributor Covenant Code of Conduct
## Our Pledge
We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.
We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.
## Our Standards
Examples of behavior that contributes to a positive environment for our
community include:
* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
* Focusing on what is best not just for us as individuals, but for the
overall community
Examples of unacceptable behavior include:
* The use of sexualized language or imagery, and sexual attention or
advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Enforcement Responsibilities
Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.
Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.
## Scope
This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
chaoscaot@zohomail.eu.
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the
reporter of any incident.
## Enforcement Guidelines
Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:
### 1. Correction
**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.
**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.
### 2. Warning
**Community Impact**: A violation through a single incident or series
of actions.
**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or
permanent ban.
### 3. Temporary Ban
**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.
**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.
### 4. Permanent Ban
**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within
the community.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.0, available at
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
Community Impact Guidelines were inspired by [Mozilla's code of conduct
enforcement ladder](https://github.com/mozilla/diversity).
[homepage]: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see the FAQ at
https://www.contributor-covenant.org/faq. Translations are available at
https://www.contributor-covenant.org/translations.

1
Cargo.toml Normal file → Executable file
View File

@@ -7,6 +7,7 @@ members = [
"schemsearch-sql",
"schemsearch-java"
]
resolver = "2"
[profile.small]
inherits = "release"

28
Makefile Normal file
View File

@@ -0,0 +1,28 @@
default:
@echo "Building (Release)...";
cargo rustc --release --color=always -p schemsearch-cli -- -C target-feature=+avx2
sql:
@echo "Building (Release)...";
cargo rustc --release --color=always -p schemsearch-cli --features sql -- -C target-feature=+avx2
debug:
@echo "Building (Debug)...";
cargo build -p schemsearch-cli
install: default
@echo "Installing...";
install -Dm755 target/release/schemsearch-cli /usr/bin/schemsearch
uninstall:
@echo "Uninstalling...";
rm -f /usr/bin/schemsearch
java:
@echo "Building Java...";
@echo "WARNING: This is WORK IN PROGRESS!";
javac SchemSearch.java
clean:
@echo "Cleaning...";
cargo clean

View File

@@ -1,15 +1,15 @@
# schemsearch
### A *simple* CLI tool to search in Sponge V2 Schematic files
### A *simple* CLI tool to search in Sponge Schematic files
---
## WARNING: This is a work in progress and is really simple right now. It will be improved in the future.
| Feature | Status |
|------------------------|--------|
| Block search | ✅ |
| Block data less search | ✅ |
| Tile entities search | ❌ |
| Entities search | ❌ |
| Feature | Status |
|---------------------------|--------|
| Block search | ✅ |
| Block data less search | ✅ |
| Tile entities data search | ❌ |
| Entities search | ❌ |
---
@@ -41,12 +41,12 @@ schemsearch-cli --help
---
## Roadmap
A list of features that are planned to be implemented in the future. In order of priority.
- [ ] Tile entities search
- [ ] Use AVX2 for faster search
- [ ] Tile entities data search
- [ ] Entities search
- [ ] Better error handling
- [ ] Web interface
- [ ] McEdit search
- [ ] McEdit Schematic support
---

View File

@@ -1,6 +1,6 @@
[package]
name = "schemsearch-cli"
version = "0.1.1"
version = "0.1.7"
edition = "2021"
license = "AGPL-3.0-or-later"
@@ -13,7 +13,7 @@ schemsearch-sql = { path = "../schemsearch-sql", optional = true }
clap = { version = "4.1.8", features = ["cargo"] }
futures = { version = "0.3", optional = true }
sqlx = { version = "0.6", features = [ "runtime-async-std-native-tls" , "mysql" ], optional = true }
sqlx = { version = "0.8", features = [ "runtime-async-std-native-tls" , "mysql" ], optional = true }
rayon = "1.7.0"
indicatif = { version = "0.17.3", features = ["rayon"] }
serde = "1.0.157"

View File

@@ -1,5 +1,5 @@
use serde::{Deserialize, Serialize};
use schemsearch_lib::SearchBehavior;
use schemsearch_lib::{Match, SearchBehavior};
#[derive(Serialize, Deserialize, Debug)]
#[serde(tag = "event")]
@@ -12,10 +12,8 @@ pub enum JsonEvent {
#[derive(Serialize, Deserialize, Debug)]
pub struct FoundEvent {
pub name: String,
pub x: u16,
pub y: u16,
pub z: u16,
pub percent: f32,
#[serde(flatten, rename = "match")]
pub match_: Match,
}
#[derive(Serialize, Deserialize, Debug)]

144
schemsearch-cli/src/main.rs Normal file → Executable file
View File

@@ -18,6 +18,7 @@
mod types;
mod json_output;
mod sinks;
mod stderr;
use std::fmt::Debug;
use std::io::Write;
@@ -25,8 +26,8 @@ use clap::{command, Arg, ArgAction, ValueHint};
use std::path::PathBuf;
use std::str::FromStr;
use clap::error::ErrorKind;
use schemsearch_lib::{search, SearchBehavior};
use crate::types::{PathSchematicSupplier, SchematicSupplierType};
use schemsearch_lib::{Match, SearchBehavior};
use crate::types::{PathSchematicSupplier, SchematicSupplier, SchematicSupplierType};
#[cfg(feature = "sql")]
use futures::executor::block_on;
use rayon::prelude::*;
@@ -37,18 +38,21 @@ use schemsearch_sql::filter::SchematicFilter;
use schemsearch_sql::load_all_schematics;
#[cfg(feature = "sql")]
use crate::types::SqlSchematicSupplier;
use indicatif::{ParallelProgressIterator, ProgressStyle};
use schemsearch_files::Schematic;
use indicatif::*;
use schemsearch_files::SpongeSchematic;
use crate::sinks::{OutputFormat, OutputSink};
use crate::stderr::MaschineStdErr;
use schemsearch_lib::nbt_search::has_invalid_nbt;
use schemsearch_lib::search::search;
fn main() {
#[allow(unused_mut)]
let mut cmd = command!("schemsearch")
let mut cmd = command!("schemsearch")
.arg(
Arg::new("pattern")
.help("The pattern to search for")
.required(true)
.value_hint(ValueHint::FilePath)
.required_unless_present("invalid-nbt")
.action(ArgAction::Set),
)
.arg(
@@ -92,9 +96,16 @@ fn main() {
.long("air-as-any")
.action(ArgAction::SetTrue),
)
.arg(
Arg::new("invalid-nbt")
.help("Search for Schematics with Invalid or missing NBT data")
.short('I')
.long("invalid-nbt")
.action(ArgAction::SetTrue),
)
.arg(
Arg::new("output")
.help("The output format and path [Format:Path] available formats: text, json, csv; available paths: std, (file path)")
.help("The output format and path [Format:Path] available formats: text, json, csv; available paths: std, err, (file path)")
.short('o')
.long("output")
.action(ArgAction::Append)
@@ -132,20 +143,38 @@
)
.arg(
Arg::new("threads")
.help("The number of threads to use [0 = Available Threads]")
.help("The number of threads to use [0 = all Available Threads]")
.short('T')
.long("threads")
.action(ArgAction::Set)
.default_value("0")
.value_parser(|s: &str| s.parse::<usize>().map_err(|e| e.to_string())),
)
.arg(
Arg::new("machine")
.help("Output for machines")
.short('m')
.long("machine")
.action(ArgAction::Set)
.default_value("0")
.value_parser(|s: &str| s.parse::<u16>().map_err(|e| e.to_string()))
)
.arg(
Arg::new("limit")
.help("The maximum number of matches to return [0 = Unlimited]")
.short('l')
.long("limit")
.action(ArgAction::Set)
.default_value("50")
.value_parser(|s: &str| s.parse::<usize>().map_err(|e| e.to_string())),
)
.about("Searches for a pattern in a schematic")
.bin_name("schemsearch");
#[cfg(feature = "sql")]
let mut cmd = cmd
let mut cmd = cmd
.arg(
Arg::new("sql")
Arg::new("sql")
.help("Use the SteamWar SQL Database")
.short('s')
.long("sql")
@@ -184,18 +213,22 @@ fn main() {
air_as_any: matches.get_flag("air-as-any"),
ignore_entities: matches.get_flag("ignore-entities"),
threshold: *matches.get_one::<f32>("threshold").expect("Couldn't get threshold"),
invalid_nbt: matches.get_flag("invalid-nbt"),
};
let pattern = match Schematic::load(&PathBuf::from(matches.get_one::<String>("pattern").unwrap())) {
Ok(x) => x,
Err(e) => {
cmd.error(ErrorKind::Io, format!("Error while loading Pattern: {}", e.to_string())).exit();
}
let pattern = match matches.get_one::<String>("pattern") {
Some(p) => match SpongeSchematic::load(&PathBuf::from(p)) {
Ok(x) => Some(x),
Err(e) => {
cmd.error(ErrorKind::Io, format!("Error while loading Pattern: {}", e.to_string())).exit();
}
},
None => None,
};
let mut schematics: Vec<SchematicSupplierType> = Vec::new();
match matches.get_many::<String>("schematic") {
None => {},
None => {}
Some(x) => {
let paths = x.map(|x| PathBuf::from(x));
for path in paths {
@@ -206,12 +239,12 @@ fn main() {
.filter(|x| x.path().is_file())
.filter(|x| x.path().extension().unwrap().to_str().unwrap() == "schem")
.for_each(|x| {
schematics.push(SchematicSupplierType::PATH(Box::new(PathSchematicSupplier {
schematics.push(SchematicSupplierType::PATH(PathSchematicSupplier {
path: x.path(),
})))
}))
});
} else if path.extension().unwrap().to_str().unwrap() == "schem" {
schematics.push(SchematicSupplierType::PATH(Box::new(PathSchematicSupplier { path })));
schematics.push(SchematicSupplierType::PATH(PathSchematicSupplier { path }));
}
}
}
@@ -227,7 +260,7 @@ fn main() {
filter = filter.name(x.collect());
}
for schem in block_on(load_all_schematics(filter)) {
schematics.push(SchematicSupplierType::SQL(SqlSchematicSupplier{
schematics.push(SchematicSupplierType::SQL(SqlSchematicSupplier {
node: schem
}))
};
@@ -246,37 +279,36 @@ fn main() {
ThreadPoolBuilder::new().num_threads(*matches.get_one::<usize>("threads").expect("Could not get threads")).build_global().unwrap();
let matches: Vec<SearchResult> = schematics.par_iter().progress_with_style(ProgressStyle::with_template("[{elapsed}, ETA: {eta}] {wide_bar} {pos}/{len} {per_sec}").unwrap()).map(|schem| {
let bar = ProgressBar::new(schematics.len() as u64); // "maschine"
bar.set_style(ProgressStyle::with_template("[{elapsed}, ETA: {eta}] {wide_bar} {pos}/{len} {per_sec}").unwrap());
let term_size = *matches.get_one::<u16>("machine").expect("Could not get machine");
if term_size != 0 {
bar.set_draw_target(ProgressDrawTarget::term_like(Box::new(MaschineStdErr { size: term_size })))
}
let max_matching = *matches.get_one::<usize>("limit").expect("Could not get max-matching");
let matches: Vec<SearchResult> = schematics.par_iter().progress_with(bar).map(|schem| {
match schem {
SchematicSupplierType::PATH(schem) => {
let schematic = match load_schem(&schem.path) {
Some(x) => x,
None => return SearchResult {
name: schem.get_name(),
matches: vec![]
matches: Vec::default(),
}
};
SearchResult {
name: schem.get_name(),
matches: search(schematic, &pattern, search_behavior)
}
search_in_schem(schematic, pattern.as_ref(), search_behavior, schem)
}
#[cfg(feature = "sql")]
SchematicSupplierType::SQL(schem) => {
match schem.get_schematic() {
Ok(schematic) => {
SearchResult {
name: schem.get_name(),
matches: search(schematic, &pattern, search_behavior)
}
}
Ok(schematic) => search_in_schem(schematic, pattern.as_ref(), search_behavior, schem),
Err(e) => {
if !output_std && !output_std_csv {
println!("Error while loading schematic ({}): {}", schem.get_name(), e.to_string());
}
eprintln!("Error while loading schematic ({}): {}", schem.get_name(), e.to_string());
SearchResult {
name: schem.get_name(),
matches: vec![]
matches: Vec::default(),
}
}
}
@@ -284,13 +316,19 @@ fn main() {
}
}).collect();
for matching in matches {
let mut matches_count = 0;
'outer: for matching in matches {
let schem_name = matching.name;
let matching = matching.matches;
for x in matching {
for out in &mut output {
write!(out.1, "{}", out.0.found_match(&schem_name, x)).unwrap();
}
matches_count += 1;
if max_matching != 0 && matches_count >= max_matching {
break 'outer;
}
}
}
@@ -301,8 +339,34 @@ fn main() {
}
}
fn load_schem(schem_path: &PathBuf) -> Option<Schematic> {
match Schematic::load(schem_path) {
fn search_in_schem(schematic: SpongeSchematic, pattern: Option<&SpongeSchematic>, search_behavior: SearchBehavior, schem: &impl SchematicSupplier) -> SearchResult {
if search_behavior.invalid_nbt {
if has_invalid_nbt(schematic) {
SearchResult {
name: schem.get_name(),
matches: vec![Match {
x: 0,
y: 0,
z: 0,
percent: 1.0,
}],
}
} else {
SearchResult {
name: schem.get_name(),
matches: vec![],
}
}
} else {
SearchResult {
name: schem.get_name(),
matches: search(schematic, pattern.unwrap(), search_behavior),
}
}
}
fn load_schem(schem_path: &PathBuf) -> Option<SpongeSchematic> {
match SpongeSchematic::load(schem_path) {
Ok(x) => Some(x),
Err(e) => {
println!("Error while loading schematic ({}): {}", schem_path.to_str().unwrap(), e.to_string());
@@ -314,6 +378,6 @@ fn load_schem(schem_path: &PathBuf) -> Option<Schematic> {
#[derive(Debug, Clone)]
struct SearchResult {
name: String,
matches: Vec<(u16, u16, u16, f32)>,
matches: Vec<Match>,
}

24
schemsearch-cli/src/sinks.rs Normal file → Executable file
View File

@@ -2,12 +2,15 @@ use std::fs::File;
use std::io::BufWriter;
use std::str::FromStr;
use std::io::Write;
use schemsearch_lib::SearchBehavior;
use std::time::Duration;
use indicatif::HumanDuration;
use schemsearch_lib::{Match, SearchBehavior};
use crate::json_output::{EndEvent, FoundEvent, InitEvent, JsonEvent};
#[derive(Debug, Clone)]
pub enum OutputSink {
Stdout,
Stderr,
File(String),
}
@@ -37,6 +40,7 @@ impl FromStr for OutputSink {
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"std" => Ok(OutputSink::Stdout),
"err" => Ok(OutputSink::Stderr),
_ => Ok(OutputSink::File(s.to_string()))
}
}
@@ -45,23 +49,21 @@
impl OutputSink {
pub fn output(&self) -> Box<dyn Write> {
match self {
OutputSink::Stdout => Box::new(std::io::stdout().lock()),
OutputSink::Stdout => Box::new(std::io::stdout()),
OutputSink::Stderr => Box::new(std::io::stderr()),
OutputSink::File(path) => Box::new(BufWriter::new(File::create(path).unwrap()))
}
}
}
impl OutputFormat {
pub fn found_match(&self, name: &String, pos: (u16, u16, u16, f32)) -> String {
pub fn found_match(&self, name: &String, pos: Match) -> String {
match self {
OutputFormat::Text => format!("Found match in '{}' at x: {}, y: {}, z: {}, % = {}\n", name, pos.0, pos.1, pos.2, pos.3),
OutputFormat::CSV => format!("{},{},{},{},{}\n", name, pos.0, pos.1, pos.2, pos.3),
OutputFormat::Text => format!("Found match in '{}' at x: {}, y: {}, z: {}, % = {}\n", name, pos.x, pos.y, pos.z, pos.percent),
OutputFormat::CSV => format!("{},{},{},{},{}\n", name, pos.x, pos.y, pos.z, pos.percent),
OutputFormat::JSON => format!("{}\n", serde_json::to_string(&JsonEvent::Found(FoundEvent {
name: name.clone(),
x: pos.0,
y: pos.1,
z: pos.2,
percent: pos.3,
match_: pos,
})).unwrap())
}
}
@@ -69,7 +71,7 @@ impl OutputFormat {
pub fn start(&self, total: u32, search_behavior: &SearchBehavior, start_time: u128) -> String {
match self {
OutputFormat::Text => format!("Starting search in {} schematics\n", total),
OutputFormat::CSV => format!("Name,X,Y,Z,Percent\n"),
OutputFormat::CSV => "Name,X,Y,Z,Percent\n".to_owned(),
OutputFormat::JSON => format!("{}\n", serde_json::to_string(&JsonEvent::Init(InitEvent {
total,
search_behavior: search_behavior.clone(),
@@ -80,7 +82,7 @@
pub fn end(&self, end_time: u128) -> String {
match self {
OutputFormat::Text => format!("Search complete in {}s\n", end_time / 1000),
OutputFormat::Text => format!("Search complete in {}\n", HumanDuration(Duration::from_millis(end_time as u64))),
OutputFormat::CSV => format!("{}\n", end_time),
OutputFormat::JSON => format!("{}\n", serde_json::to_string(&JsonEvent::End(EndEvent{ end_time })).unwrap())
}

View File

@@ -0,0 +1,44 @@
use std::fmt::Debug;
use std::io::Write;
use indicatif::TermLike;
#[derive(Debug)]
pub struct MaschineStdErr { pub(crate) size: u16}
impl TermLike for MaschineStdErr {
fn width(&self) -> u16 {
self.size
}
fn move_cursor_up(&self, _: usize) -> std::io::Result<()> {
Ok(())
}
fn move_cursor_down(&self, _: usize) -> std::io::Result<()> {
Ok(())
}
fn move_cursor_right(&self, _: usize) -> std::io::Result<()> {
Ok(())
}
fn move_cursor_left(&self, _: usize) -> std::io::Result<()> {
Ok(())
}
fn write_line(&self, s: &str) -> std::io::Result<()> {
writeln!(std::io::stderr(), "{}", s)
}
fn write_str(&self, s: &str) -> std::io::Result<()> {
write!(std::io::stderr(), "{}", s)
}
fn clear_line(&self) -> std::io::Result<()> {
Ok(())
}
fn flush(&self) -> std::io::Result<()> {
std::io::stderr().flush()
}
}

27
schemsearch-cli/src/types.rs Normal file → Executable file
View File

@@ -15,24 +15,32 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
#[cfg(feature = "sql")]
use std::io::Cursor;
use std::path::PathBuf;
#[cfg(feature = "sql")]
use futures::executor::block_on;
#[cfg(feature = "sql")]
use schemsearch_files::SpongeSchematic;
#[cfg(feature = "sql")]
use schemsearch_sql::{load_schemdata, SchematicNode};
pub enum SchematicSupplierType {
PATH(Box<PathSchematicSupplier>),
PATH(PathSchematicSupplier),
#[cfg(feature = "sql")]
SQL(SqlSchematicSupplier),
}
pub trait SchematicSupplier {
fn get_name(&self) -> String;
}
pub struct PathSchematicSupplier {
pub path: PathBuf,
}
impl PathSchematicSupplier {
pub fn get_name(&self) -> String {
impl SchematicSupplier for PathSchematicSupplier {
fn get_name(&self) -> String {
self.path.file_stem().unwrap().to_str().unwrap().to_string()
}
}
@@ -44,12 +52,17 @@ pub struct SqlSchematicSupplier {
#[cfg(feature = "sql")]
impl SqlSchematicSupplier {
pub fn get_schematic(&self) -> Result<Schematic, String> {
let schemdata = block_on(load_schemdata(self.node.id));
Schematic::load_data(schemdata.as_slice())
pub fn get_schematic(&self) -> Result<SpongeSchematic, String> {
let mut schemdata = block_on(load_schemdata(self.node.id));
SpongeSchematic::load_data(&mut Cursor::new(schemdata.as_mut_slice()))
}
}
pub fn get_name(&self) -> String {
#[cfg(feature = "sql")]
impl SchematicSupplier for SqlSchematicSupplier {
fn get_name(&self) -> String {
format!("{} ({})", self.node.name, self.node.id)
}
}

View File

@@ -1,6 +1,6 @@
[package]
name = "schemsearch_faster"
version = "0.1.1"
version = "0.1.3"
edition = "2021"
license = "AGPL-3.0-or-later"

View File

@@ -16,9 +16,9 @@
*/
use nbt::Map;
use schemsearch_files::Schematic;
use schemsearch_files::SpongeV2Schematic;
pub fn convert_to_search_space(schem: &Schematic, palette: &Vec<String>) -> Vec<Vec<u8>> {
pub fn convert_to_search_space(schem: &SpongeV2Schematic, palette: &Vec<String>) -> Vec<Vec<u8>> {
let mut data: Vec<Vec<u8>> = Vec::with_capacity(palette.len());
let block_data = &schem.block_data;
for name in palette {
@@ -48,26 +48,26 @@ pub fn unwrap_palette(palette: &Map<String, i32>) -> Vec<String> {
#[cfg(test)]
mod tests {
use std::path::{Path, PathBuf};
use schemsearch_files::Schematic;
use schemsearch_files::SpongeV2Schematic;
use crate::{convert_to_search_space, unwrap_palette};
//#[test]
pub fn test() {
let schematic = Schematic::load(&PathBuf::from("../tests/Pattern.schem")).unwrap();
let schematic = SpongeV2Schematic::load(&PathBuf::from("../tests/Pattern.schem")).unwrap();
dbg!(convert_to_search_space(&schematic, &unwrap_palette(&schematic.palette)));
}
//#[test]
pub fn test_2() {
let schematic = Schematic::load(&PathBuf::from("../tests/Pattern.schem")).unwrap();
let schematic2 = Schematic::load(&PathBuf::from("../tests/Random.schem")).unwrap();
let schematic = SpongeV2Schematic::load(&PathBuf::from("../tests/Pattern.schem")).unwrap();
let schematic2 = SpongeV2Schematic::load(&PathBuf::from("../tests/Random.schem")).unwrap();
println!("{:?}", convert_to_search_space(&schematic2, &unwrap_palette(&schematic.palette)));
}
//#[test]
pub fn test_big() {
let schematic = Schematic::load(&PathBuf::from("../tests/endstone.schem")).unwrap();
let schematic2 = Schematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
let schematic = SpongeV2Schematic::load(&PathBuf::from("../tests/endstone.schem")).unwrap();
let schematic2 = SpongeV2Schematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
let _ = convert_to_search_space(&schematic2, &unwrap_palette(&schematic.palette));
}
}

View File

@@ -1,10 +1,11 @@
[package]
name = "schemsearch-files"
version = "0.1.1"
version = "0.1.5"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
hematite-nbt = "0.5.2"
serde = "1.0.152"
flate2 = "1.0.25"
named-binary-tag = "0.6"

View File

@ -15,81 +15,176 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use std::collections::hash_map::HashMap;
use std::io::Read;
use std::path::PathBuf;
use nbt::{Map, Value};
use serde::{Deserialize, Deserializer, Serialize};
use nbt::{CompoundTag, Tag};
#[derive(Serialize, Deserialize, Debug)]
pub struct Schematic {
#[serde(rename = "Version")]
pub version: i32,
#[serde(rename = "DataVersion")]
#[derive(Clone, Debug)]
pub struct SpongeSchematic {
pub data_version: i32,
#[serde(rename = "Metadata")]
pub metadata: Map<String, Value>,
#[serde(rename = "Width")]
pub metadata: CompoundTag,
pub width: u16,
#[serde(rename = "Height")]
pub height: u16,
#[serde(rename = "Length")]
pub length: u16,
#[serde(rename = "Offset")]
pub offset: [i32; 3],
#[serde(rename = "PaletteMax")]
pub palette_max: i32,
#[serde(rename = "Palette")]
pub palette: Map<String, i32>,
#[serde(rename = "BlockData", deserialize_with = "read_blockdata")]
pub palette: HashMap<String, i32>,
pub block_data: Vec<i32>,
#[serde(rename = "BlockEntities")]
pub block_entities: Vec<BlockEntity>,
#[serde(rename = "Entities")]
pub entities: Option<Vec<Entity>>,
}
fn read_blockdata<'de, D>(deserializer: D) -> Result<Vec<i32>, D::Error>
where
D: Deserializer<'de>,
{
let s: Vec<i8> = Deserialize::deserialize(deserializer)?;
Ok(read_varint_array(&s))
#[derive(Clone, Debug)]
pub struct BlockContainer {
pub palette: HashMap<String, i32>,
pub block_data: Vec<i32>,
pub block_entities: Vec<BlockEntity>,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
#[derive(Debug, Clone)]
pub struct BlockEntity {
#[serde(rename = "Id")]
pub id: String,
#[serde(rename = "Pos")]
pub pos: [i32; 3],
}
#[derive(Serialize, Deserialize, Debug, Clone)]
// Block entity as stored by Sponge schematic v3, where the NBT payload is
// kept in a dedicated `data` compound (unlike `BlockEntity`, which carries
// only id and position).
#[derive(Debug, Clone)]
pub struct BlockEntityV3 {
    pub id: String,                 // namespaced id, e.g. "minecraft:chest"
    pub pos: [i32; 3],              // position relative to the schematic origin
    pub data: HashMap<String, Tag>, // raw NBT payload of the block entity
}
#[derive(Debug, Clone)]
pub struct Entity {
#[serde(rename = "Id")]
pub id: String,
#[serde(rename = "Pos")]
pub pos: [i32; 3],
}
impl Schematic {
pub fn load_data<R>(data: R) -> Result<Schematic, String> where R: Read {
let schematic: Schematic = match nbt::from_gzip_reader(data) {
Ok(schem) => schem,
Err(e) => return Err(format!("Failed to parse schematic: {}", e))
};
Ok(schematic)
impl SpongeSchematic {
    /// Reads a gzip-compressed schematic NBT stream and parses it into a
    /// `SpongeSchematic`, dispatching on the format version.
    ///
    /// The version is taken from the "Version" tag; when that tag is missing
    /// it is guessed from version-specific keys ("Blocks" => v3,
    /// "BlockEntities" => v2, "TileEntities" => v1).
    pub fn load_data<R>(data: &mut R) -> Result<SpongeSchematic, String> where R: Read {
        let nbt: CompoundTag = nbt::decode::read_gzip_compound_tag(data).map_err(|e| e.to_string())?;
        let version = nbt.get_i32("Version").unwrap_or_else(|_| {
            // Fallback heuristic for files without a "Version" tag; -1 makes
            // the match below report "Unknown Version".
            return if nbt.contains_key("Blocks") {
                3
            } else if nbt.contains_key("BlockEntities") {
                2
            } else if nbt.contains_key("TileEntities") {
                1
            } else {
                -1
            };
        });
        match version {
            1 => SpongeSchematic::from_nbt_1(nbt),
            2 => SpongeSchematic::from_nbt_2(nbt),
            3 => SpongeSchematic::from_nbt_3(nbt),
            _ => Err("Invalid schematic: Unknown Version".to_string()),
        }
    }
pub fn load(path: &PathBuf) -> Result<Schematic, String> {
let file = match std::fs::File::open(path) {
Ok(x) => x,
Err(_) => return Err(format!("Failed to open file: {}", path.to_str().unwrap()))
};
Schematic::load_data(file)
    /// Opens the file at `path` and parses it as a gzip-compressed schematic.
    ///
    /// I/O errors are converted to their string representation.
    pub fn load(path: &PathBuf) -> Result<SpongeSchematic, String> {
        let mut file = std::fs::File::open(path).map_err(|e| e.to_string())?;
        Self::load_data(&mut file)
    }
    /// Parses a Sponge schematic v1 root compound.
    ///
    /// v1 carries no "DataVersion" (recorded here as 0) and stores its block
    /// entities under the legacy "TileEntities" key. Entities are not loaded.
    pub fn from_nbt_1(nbt: CompoundTag) -> Result<Self, String> {
        Ok(Self {
            data_version: 0,
            metadata: nbt.get_compound_tag("Metadata").map_err(|e| e.to_string())?.clone(),
            width: nbt.get_i16("Width").map_err(|e| e.to_string())? as u16,
            height: nbt.get_i16("Height").map_err(|e| e.to_string())? as u16,
            length: nbt.get_i16("Length").map_err(|e| e.to_string())? as u16,
            offset: read_offset(nbt.get_i32_vec("Offset").map_err(|e| e.to_string())?)?,
            palette_max: nbt.get_i32("PaletteMax").map_err(|e| e.to_string())?,
            palette: read_palette(nbt.get_compound_tag("Palette").map_err(|e| e.to_string())?),
            block_data: read_blocks(nbt.get_i8_vec("BlockData").map_err(|e| e.to_string())?),
            // A missing "TileEntities" list is treated as "no block entities".
            block_entities: read_tile_entities(nbt.get_compound_tag_vec("TileEntities").unwrap_or_else(|_| vec![]))?,
            entities: None,
        })
    }
    /// Parses a Sponge schematic v2 root compound.
    ///
    /// v2 requires a "DataVersion" tag and renames the block-entity list to
    /// "BlockEntities". Entities are not loaded.
    pub fn from_nbt_2(nbt: CompoundTag) -> Result<Self, String> {
        Ok(Self{
            data_version: nbt.get_i32("DataVersion").map_err(|e| e.to_string())?,
            metadata: nbt.get_compound_tag("Metadata").map_err(|e| e.to_string())?.clone(),
            width: nbt.get_i16("Width").map_err(|e| e.to_string())? as u16,
            height: nbt.get_i16("Height").map_err(|e| e.to_string())? as u16,
            length: nbt.get_i16("Length").map_err(|e| e.to_string())? as u16,
            offset: read_offset(nbt.get_i32_vec("Offset").map_err(|e| e.to_string())?)?,
            palette_max: nbt.get_i32("PaletteMax").map_err(|e| e.to_string())?,
            palette: read_palette(nbt.get_compound_tag("Palette").map_err(|e| e.to_string())?),
            block_data: read_blocks(nbt.get_i8_vec("BlockData").map_err(|e| e.to_string())?),
            // A missing "BlockEntities" list is treated as "no block entities".
            block_entities: read_tile_entities(nbt.get_compound_tag_vec("BlockEntities").unwrap_or_else(|_| vec![]))?,
            entities: None,
        })
    }
    /// Parses a Sponge schematic v3 root compound.
    ///
    /// v3 nests palette, block data and block entities under a "Blocks"
    /// compound and no longer stores "PaletteMax", so that value is derived
    /// from the palette ids here. Entities are not loaded.
    pub fn from_nbt_3(nbt: CompoundTag) -> Result<Self, String> {
        let blocks = nbt.get_compound_tag("Blocks").map_err(|e| e.to_string())?;
        Ok(Self{
            data_version: nbt.get_i32("DataVersion").map_err(|e| e.to_string())?,
            metadata: nbt.get_compound_tag("Metadata").map_err(|e| e.to_string())?.clone(),
            width: nbt.get_i16("Width").map_err(|e| e.to_string())? as u16,
            height: nbt.get_i16("Height").map_err(|e| e.to_string())? as u16,
            length: nbt.get_i16("Length").map_err(|e| e.to_string())? as u16,
            offset: read_offset(nbt.get_i32_vec("Offset").map_err(|e| e.to_string())?)?,
            // "PaletteMax" was dropped in v3; reconstruct it from the palette.
            palette_max: compute_palette_max(blocks.get_compound_tag("Palette").map_err(|e| e.to_string())?),
            palette: read_palette(blocks.get_compound_tag("Palette").map_err(|e| e.to_string())?),
            block_data: read_blocks(blocks.get_i8_vec("BlockData").map_err(|e| e.to_string())?),
            // A missing "BlockEntities" list is treated as "no block entities".
            block_entities: read_tile_entities(blocks.get_compound_tag_vec("BlockEntities").unwrap_or_else(|_| vec![]))?,
            entities: None,
        })
    }
}
/// Converts a list of block-entity compounds into `BlockEntity` values.
///
/// Each compound must provide a string "Id" and a 3-element "Pos" vector;
/// the first malformed entry aborts the conversion with an error string.
fn read_tile_entities(tag: Vec<&CompoundTag>) -> Result<Vec<BlockEntity>, String> {
    tag.into_iter()
        .map(|entry| {
            let id = entry.get_str("Id").map_err(|e| e.to_string())?.to_string();
            let pos = read_offset(entry.get("Pos").map_err(|e| e.to_string())?)?;
            Ok(BlockEntity { id, pos })
        })
        .collect()
}
/// Converts a 3-element offset/position vector into a fixed `[x, y, z]` array.
///
/// Any other length yields an error, since it indicates a malformed schematic.
/// (The `&Vec<i32>` parameter is kept as-is: callers rely on it for type
/// inference of `CompoundTag::get`.)
#[inline]
fn read_offset(offset: &Vec<i32>) -> Result<[i32; 3], String> {
    <[i32; 3]>::try_from(offset.as_slice())
        .map_err(|_| "Invalid schematic: read_offset wrong length".to_string())
}
/// Extracts the palette mapping (block name -> integer id) from a palette
/// compound, silently skipping any entry whose value is not an `Int` tag.
#[inline]
fn read_palette(p: &CompoundTag) -> HashMap<String, i32> {
    p.iter()
        .filter_map(|(name, tag)| {
            if let Tag::Int(id) = tag {
                Some((name.clone(), *id))
            } else {
                None
            }
        })
        .collect()
}
/// Derives a `palette_max` value for v3 schematics by taking the largest
/// integer id in the palette compound (0 when no integer entries exist).
///
/// NOTE(review): this yields the highest id, not the number of entries —
/// confirm that matches what "PaletteMax" meant in v1/v2 schematics.
#[inline]
fn compute_palette_max(palette: &CompoundTag) -> i32 {
    let mut max: Option<i32> = None;
    for (_, tag) in palette.iter() {
        if let Tag::Int(id) = tag {
            match max {
                Some(m) if m >= *id => {}
                _ => max = Some(*id),
            }
        }
    }
    max.unwrap_or(0)
}
#[inline]
fn read_blocks(blockdata: &Vec<i8>) -> Vec<i32> {
    // "BlockData" is a varint-encoded byte array; decode it into palette ids.
    read_varint_array(blockdata)
}
#[inline]
pub fn read_varint_array(read: &Vec<i8>) -> Vec<i32> {
let mut data = Vec::new();
let mut value: i32 = 0;

View File

@ -1,6 +1,6 @@
[package]
name = "schemsearch-java"
version = "0.1.1"
version = "0.1.3"
edition = "2021"
license = "AGPL-3.0-or-later"

View File

@ -21,7 +21,7 @@ use jni::JNIEnv;
use jni::objects::{JClass, JString};
use jni::sys::jstring;
use schemsearch_files::Schematic;
use schemsearch_files::SpongeV2Schematic;
use schemsearch_lib::{search, SearchBehavior};
#[no_mangle]
@ -32,8 +32,8 @@ pub extern "system" fn Java_SchemSearch_search<'local>(mut env: JNIEnv<'local>,
pattern_path: JString<'local>) -> jstring {
let schematic_path: String = env.get_string(&schematic_path).expect("Couldn't get java string!").into();
let pattern_path: String = env.get_string(&pattern_path).expect("Couldn't get java string!").into();
let schematic = Schematic::load(&PathBuf::from(&schematic_path)).unwrap();
let pattern = Schematic::load(&PathBuf::from(&pattern_path)).unwrap();
let schematic = SpongeV2Schematic::load(&PathBuf::from(&schematic_path)).unwrap();
let pattern = SpongeV2Schematic::load(&PathBuf::from(&pattern_path)).unwrap();
let matches = search(schematic, &pattern, SearchBehavior {
ignore_block_data: true,
@ -45,8 +45,8 @@ pub extern "system" fn Java_SchemSearch_search<'local>(mut env: JNIEnv<'local>,
});
let mut result = String::new();
for (x, y, z, p) in matches {
result.push_str(&format!("{}, {}, {}, {};", x, y, z, p));
for m in matches {
result.push_str(&format!("{}, {}, {}, {};", m.x, m.y, m.z, m.percent));
}
result.remove(result.len() - 1);
let output = env.new_string(result).expect("Couldn't create java string!");

8
schemsearch-lib/Cargo.toml Normal file → Executable file
View File

@ -1,12 +1,14 @@
[package]
name = "schemsearch-lib"
version = "0.1.1"
version = "0.1.7"
edition = "2021"
license = "AGPL-3.0-or-later"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
hematite-nbt = "0.5.2"
serde = "1.0.152"
serde = { version = "1.0.160", features = ["derive"] }
schemsearch-files = { path = "../schemsearch-files" }
named-binary-tag = "0.6"
libmath = "0.2.1"
lazy_static = "1.4.0"

163
schemsearch-lib/src/blocks.txt Executable file
View File

@ -0,0 +1,163 @@
oak_sign
oak_wall_sign
oak_hanging_sign
oak_wall_hanging_sign
birch_sign
birch_wall_sign
birch_hanging_sign
birch_wall_hanging_sign
spruce_sign
spruce_wall_sign
spruce_hanging_sign
spruce_wall_hanging_sign
jungle_sign
jungle_wall_sign
jungle_hanging_sign
jungle_wall_hanging_sign
dark_oak_sign
dark_oak_wall_sign
dark_oak_hanging_sign
dark_oak_wall_hanging_sign
acacia_sign
acacia_wall_sign
acacia_hanging_sign
acacia_wall_hanging_sign
mangrove_sign
mangrove_wall_sign
mangrove_hanging_sign
mangrove_wall_hanging_sign
cherry_sign
cherry_wall_sign
cherry_hanging_sign
cherry_wall_hanging_sign
bamboo_sign
bamboo_wall_sign
bamboo_hanging_sign
bamboo_wall_hanging_sign
warped_sign
warped_wall_sign
warped_hanging_sign
warped_wall_hanging_sign
crimson_sign
crimson_wall_sign
crimson_hanging_sign
crimson_wall_hanging_sign
suspicious_gravel
suspicious_sand
white_banner
light_gray_banner
gray_banner
black_banner
brown_banner
red_banner
orange_banner
yellow_banner
lime_banner
green_banner
cyan_banner
light_blue_banner
blue_banner
purple_banner
magenta_banner
pink_banner
white_wall_banner
light_gray_wall_banner
gray_wall_banner
black_wall_banner
brown_wall_banner
red_wall_banner
orange_wall_banner
yellow_wall_banner
lime_wall_banner
green_wall_banner
cyan_wall_banner
light_blue_wall_banner
blue_wall_banner
purple_wall_banner
magenta_wall_banner
pink_wall_banner
white_bed
light_gray_bed
gray_bed
black_bed
brown_bed
red_bed
orange_bed
yellow_bed
lime_bed
green_bed
cyan_bed
light_blue_bed
blue_bed
purple_bed
magenta_bed
pink_bed
shulker_box
white_shulker_box
light_gray_shulker_box
gray_shulker_box
black_shulker_box
brown_shulker_box
red_shulker_box
orange_shulker_box
yellow_shulker_box
lime_shulker_box
green_shulker_box
cyan_shulker_box
light_blue_shulker_box
blue_shulker_box
purple_shulker_box
magenta_shulker_box
pink_shulker_box
furnace
blast_furnace
smoker
chest
trapped_chest
ender_chest
enchanting_table
barrel
lectern
jukebox
bell
brewing_stand
bee_nest
beehive
decorated_pot
beacon
conduit
campfire
soul_campfire
redstone_comparator
hopper
dispenser
dropper
moving_piston
daylight_detector
sculk_sensor
calibrated_sculk_sensor
sculk_catalyst
sculk_shrieker
player_head
player_wall_head
wither_skeleton_skull
wither_skeleton_wall_skull
zombie_head
zombie_wall_head
skeleton_skull
skeleton_wall_skull
creeper_head
creeper_wall_head
piglin_head
piglin_wall_head
dragon_head
dragon_wall_head
chiseled_bookshelf
command_block
chain_command_block
repeating_command_block
structure_block
jigsaw_block
end_portal
end_gateway
monster_spawner

139
schemsearch-lib/src/lib.rs Normal file → Executable file
View File

@ -16,11 +16,10 @@
*/
pub mod pattern_mapper;
pub mod search;
pub mod nbt_search;
use serde::{Deserialize, Serialize};
use pattern_mapper::match_palette;
use schemsearch_files::Schematic;
use crate::pattern_mapper::match_palette_adapt;
use serde::{Serialize, Deserialize};
#[derive(Debug, Clone, Copy, Deserialize, Serialize)]
pub struct SearchBehavior {
@ -30,73 +29,15 @@ pub struct SearchBehavior {
pub air_as_any: bool,
pub ignore_entities: bool,
pub threshold: f32,
pub invalid_nbt: bool,
}
pub fn search(
schem: Schematic,
pattern_schem: &Schematic,
search_behavior: SearchBehavior,
) -> Vec<(u16, u16, u16, f32)> {
if schem.width < pattern_schem.width || schem.height < pattern_schem.height || schem.length < pattern_schem.length {
return vec![];
}
if pattern_schem.palette.len() > schem.palette.len() {
return vec![];
}
let pattern_schem = match_palette(&schem, &pattern_schem, search_behavior.ignore_block_data);
let mut matches: Vec<(u16, u16, u16, f32)> = Vec::new();
let pattern_data = pattern_schem.block_data.as_slice();
let schem_data = if search_behavior.ignore_block_data {
match_palette_adapt(&schem, &pattern_schem.palette, search_behavior.ignore_block_data)
} else {
schem.block_data
};
let schem_data = schem_data.as_slice();
let air_id = if search_behavior.ignore_air || search_behavior.air_as_any { pattern_schem.palette.get("minecraft:air").unwrap_or(&-1) } else { &-1};
let pattern_blocks = (pattern_schem.width * pattern_schem.height * pattern_schem.length) as f32;
let pattern_width = pattern_schem.width as usize;
let pattern_height = pattern_schem.height as usize;
let pattern_length = pattern_schem.length as usize;
let schem_width = schem.width as usize;
let schem_height = schem.height as usize;
let schem_length = schem.length as usize;
for y in 0..=schem_height - pattern_height {
for z in 0..=schem_length - pattern_length {
for x in 0..=schem_width - pattern_width {
let mut matching = 0;
for j in 0..pattern_height {
for k in 0..pattern_length {
for i in 0..pattern_width {
let index = (x + i) + schem_width * ((z + k) + (y + j) * schem_length);
let pattern_index = i + pattern_width * (k + j * pattern_length);
let data = unsafe {schem_data.get_unchecked(index) };
let pattern_data = unsafe { pattern_data.get_unchecked(pattern_index) };
if *data == *pattern_data || (search_behavior.ignore_air && *data == *air_id) || (search_behavior.air_as_any && *pattern_data == *air_id) {
matching += 1;
}
}
}
}
let matching_percent = matching as f32 / pattern_blocks;
if matching_percent >= search_behavior.threshold {
matches.push((x as u16, y as u16, z as u16, matching_percent));
}
}
}
}
return matches;
#[derive(Debug, Clone, Copy, Default, Deserialize, Serialize)]
pub struct Match {
pub x: u16,
pub y: u16,
pub z: u16,
pub percent: f32,
}
#[inline]
@ -108,42 +49,34 @@ pub fn normalize_data(data: &str, ignore_data: bool) -> &str {
}
}
pub fn parse_schematic(data: &Vec<u8>) -> Schematic {
if data[0] == 0x1f && data[1] == 0x8b {
// gzip
nbt::from_gzip_reader(data.as_slice()).unwrap()
} else {
// uncompressed
nbt::from_reader(data.as_slice()).unwrap()
}
}
#[allow(unused_imports)]
#[cfg(test)]
mod tests {
use std::path::{Path, PathBuf};
use schemsearch_files::Schematic;
use crate::pattern_mapper::strip_data;
use schemsearch_files::SpongeSchematic;
use crate::pattern_mapper::{match_palette, strip_data};
use crate::search::search;
use super::*;
#[test]
fn read_schematic() {
let schematic = Schematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
let schematic = SpongeSchematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
assert_eq!(schematic.width as usize * schematic.height as usize * schematic.length as usize, schematic.block_data.len());
assert_eq!(schematic.palette_max, schematic.palette.len() as i32);
}
#[test]
fn test_parse_function() {
let file = std::fs::File::open("../tests/simple.schem").expect("Failed to open file");
let schematic: Schematic = parse_schematic(&std::io::Read::bytes(file).map(|b| b.unwrap()).collect());
let schematic = SpongeSchematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
assert_eq!(schematic.width as usize * schematic.height as usize * schematic.length as usize, schematic.block_data.len());
assert_eq!(schematic.palette_max, schematic.palette.len() as i32);
}
#[test]
fn test_strip_schem() {
let schematic = Schematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
let schematic = SpongeSchematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
let stripped = strip_data(&schematic);
assert_eq!(stripped.palette.keys().any(|k| k.contains('[')), false);
@ -151,24 +84,24 @@ mod tests {
#[test]
fn test_match_palette() {
let schematic = Schematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
let endstone = Schematic::load(&PathBuf::from("../tests/endstone.schem")).unwrap();
let schematic = SpongeSchematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
let endstone = SpongeSchematic::load(&PathBuf::from("../tests/endstone.schem")).unwrap();
let _ = match_palette(&schematic, &endstone, true);
}
#[test]
fn test_match_palette_ignore_data() {
let schematic = Schematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
let endstone = Schematic::load(&PathBuf::from("../tests/endstone.schem")).unwrap();
let schematic = SpongeSchematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
let endstone = SpongeSchematic::load(&PathBuf::from("../tests/endstone.schem")).unwrap();
let _ = match_palette(&schematic, &endstone, false);
}
#[test]
pub fn test_big_search() {
let schematic = Schematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
let endstone = Schematic::load(&PathBuf::from("../tests/endstone.schem")).unwrap();
let schematic = SpongeSchematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
let endstone = SpongeSchematic::load(&PathBuf::from("../tests/endstone.schem")).unwrap();
let _ = search(schematic, &endstone, SearchBehavior {
ignore_block_data: true,
@ -176,14 +109,15 @@ mod tests {
ignore_entities: true,
ignore_air: false,
air_as_any: false,
threshold: 0.9
threshold: 0.9,
invalid_nbt: false
});
}
#[test]
pub fn test_search() {
let schematic = Schematic::load(&PathBuf::from("../tests/Random.schem")).unwrap();
let pattern = Schematic::load(&PathBuf::from("../tests/Pattern.schem")).unwrap();
let schematic = SpongeSchematic::load(&PathBuf::from("../tests/Random.schem")).unwrap();
let pattern = SpongeSchematic::load(&PathBuf::from("../tests/Pattern.schem")).unwrap();
let matches = search(schematic, &pattern, SearchBehavior {
ignore_block_data: true,
@ -191,18 +125,21 @@ mod tests {
ignore_entities: true,
ignore_air: false,
air_as_any: false,
threshold: 0.9
threshold: 0.9,
invalid_nbt: false
});
println!("{:?}", matches);
assert_eq!(matches.len(), 1);
assert_eq!(matches[0], (1, 0, 3, 1.0));
assert_eq!(matches[0].x, 1);
assert_eq!(matches[0].y, 0);
assert_eq!(matches[0].z, 3);
assert_eq!(matches[0].percent, 1.0);
}
#[test]
pub fn test_search_ws() {
let schematic = Schematic::load(&PathBuf::from("../tests/warships/GreyFly-by-Bosslar.schem")).unwrap();
let pattern = Schematic::load(&PathBuf::from("../tests/gray_castle_complex.schem")).unwrap();
let schematic = SpongeSchematic::load(&PathBuf::from("../tests/warships/GreyFly-by-Bosslar.schem")).unwrap();
let pattern = SpongeSchematic::load(&PathBuf::from("../tests/gray_castle_complex.schem")).unwrap();
let matches = search(schematic, &pattern, SearchBehavior {
ignore_block_data: false,
@ -210,10 +147,10 @@ mod tests {
ignore_entities: false,
ignore_air: false,
air_as_any: false,
threshold: 0.9
threshold: 0.9,
invalid_nbt: false
});
println!("{:?}", matches);
assert_eq!(matches.len(), 1);
}
}

110
schemsearch-lib/src/nbt_search.rs Executable file
View File

@ -0,0 +1,110 @@
use std::borrow::ToOwned;
use std::collections::HashSet;
use std::iter::Iterator;
use lazy_static::lazy_static;
use schemsearch_files::SpongeSchematic;
// List of block names that carry NBT data (one per line, without the
// "minecraft:" namespace), embedded at compile time from blocks.txt.
const NBT_BLOCKS: &str = include_str!("blocks.txt");

lazy_static! {
    // The same list with the "minecraft:" namespace prepended, held in a
    // HashSet for O(1) membership tests against palette keys.
    static ref NBT_BLOCKS_SET: HashSet<String> = {
        NBT_BLOCKS.lines().map(|x| format!("minecraft:{}", x)).collect()
    };
}
/// Checks whether a schematic places NBT-carrying blocks (chests, signs, ...)
/// without providing a block entity at the corresponding position.
///
/// Returns `true` when the schematic is "invalid": either it has NBT-capable
/// palette entries but no block entities at all, or some placed NBT-capable
/// block has no block entity at its coordinates.
pub fn has_invalid_nbt(schem: SpongeSchematic) -> bool {
    // Fast path: NBT-capable blocks in the palette but not a single block
    // entity present. Assumes exporters only list palette entries that are
    // actually used in the block data — TODO confirm against the writers.
    if schem.block_entities.is_empty() && schem.palette.keys().any(|v| NBT_BLOCKS_SET.contains(v)) {
        return true;
    }
    // Palette ids of all NBT-capable blocks, for O(1) lookups in the scan.
    let nbt_blocks: HashSet<i32> = schem
        .palette
        .iter()
        .filter(|(k, _)| NBT_BLOCKS_SET.contains(k.as_str()))
        .map(|(_, v)| *v)
        .collect();
    for (i, block) in schem.block_data.iter().enumerate() {
        if nbt_blocks.contains(block) {
            // i = x + z * Width + y * Width * Length
            let x = i % schem.width as usize;
            let z = (i / schem.width as usize) % schem.length as usize;
            let y = i / (schem.width as usize * schem.length as usize);
            // Bug fix: the block is invalid when NO block entity sits at its
            // position. The previous check (`any(|e| !e.pos.eq(..))`) flagged
            // the schematic as soon as ANY entity had a different position,
            // which is almost always true once two block entities exist.
            if !schem
                .block_entities
                .iter()
                .any(|e| e.pos == [x as i32, y as i32, z as i32])
            {
                return true;
            }
        }
    }
    false
}
#[allow(unused_imports)]
#[cfg(test)]
mod tests {
    use nbt::CompoundTag;
    use schemsearch_files::{BlockEntity, SpongeSchematic};
    use super::*;

    // A chest in the palette but no block entities at all => invalid.
    #[test]
    fn test_has_invalid_nbt() {
        let schem = SpongeSchematic {
            data_version: 1,
            metadata: CompoundTag::new(),
            width: 0,
            height: 0,
            length: 0,
            offset: [0, 0, 0],
            palette_max: 1,
            palette: vec![("minecraft:chest".to_owned(), 1)].into_iter().collect(),
            block_data: vec![1],
            block_entities: vec![],
            entities: None,
        };
        assert_eq!(has_invalid_nbt(schem), true);
    }

    // A chest at (0,0,0) with a matching block entity at (0,0,0) => valid.
    #[test]
    fn test_has_invalid_nbt_2() {
        let schem = SpongeSchematic {
            data_version: 1,
            metadata: CompoundTag::new(),
            width: 1,
            height: 1,
            length: 1,
            offset: [0, 0, 0],
            palette_max: 1,
            palette: vec![("minecraft:chest".to_owned(), 1)].into_iter().collect(),
            block_data: vec![1],
            block_entities: vec![
                BlockEntity {
                    id: "minecraft:chest".to_owned(),
                    pos: [0, 0, 0],
                }
            ],
            entities: None,
        };
        assert_eq!(has_invalid_nbt(schem), false);
    }

    // A chest at (0,0,0) but the only block entity sits at (1,0,0) => invalid.
    #[test]
    fn test_has_invalid_nbt_3() {
        let schem = SpongeSchematic {
            data_version: 1,
            metadata: CompoundTag::new(),
            width: 2,
            height: 1,
            length: 1,
            offset: [0, 0, 0],
            palette_max: 1,
            palette: vec![("minecraft:chest".to_owned(), 1), ("minecraft:stone".to_owned(), 2)].into_iter().collect(),
            block_data: vec![1, 2],
            block_entities: vec![
                BlockEntity {
                    id: "minecraft:chest".to_owned(),
                    pos: [1, 0, 0],
                }
            ],
            entities: None,
        };
        assert_eq!(has_invalid_nbt(schem), true);
    }
}

View File

@ -15,11 +15,12 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use nbt::Map;
use schemsearch_files::Schematic;
use std::collections::HashMap;
use nbt::CompoundTag;
use schemsearch_files::SpongeSchematic;
use crate::normalize_data;
fn create_reverse_palette(schem: &Schematic) -> Vec<&str> {
fn create_reverse_palette(schem: &SpongeSchematic) -> Vec<&str> {
let mut reverse_palette = Vec::with_capacity(schem.palette_max as usize);
(0..schem.palette_max).for_each(|_| reverse_palette.push(""));
for (key, value) in schem.palette.iter() {
@ -28,15 +29,15 @@ fn create_reverse_palette(schem: &Schematic) -> Vec<&str> {
reverse_palette
}
pub fn strip_data(schem: &Schematic) -> Schematic {
pub fn strip_data(schem: &SpongeSchematic) -> SpongeSchematic {
let mut data: Vec<i32> = Vec::new();
let mut palette: Map<String, i32> = Map::new();
let mut palette: HashMap<String, i32> = HashMap::new();
let mut palette_max: i32 = 0;
let reverse_palette = create_reverse_palette(schem);
for block in schem.block_data.iter() {
let block_name = reverse_palette[*block as usize].clone();
let block_name = reverse_palette[*block as usize];
let block_name = block_name.split('[').next().unwrap().to_string();
let entry = palette.entry(block_name).or_insert_with(|| {
@ -47,9 +48,8 @@ pub fn strip_data(schem: &Schematic) -> Schematic {
data.push(*entry);
}
Schematic {
version: schem.version,
data_version: schem.data_version,
SpongeSchematic {
data_version: 1,
palette,
palette_max,
block_data: data,
@ -57,17 +57,17 @@ pub fn strip_data(schem: &Schematic) -> Schematic {
height: schem.height,
length: schem.length,
width: schem.width,
metadata: schem.metadata.clone(),
offset: schem.offset.clone(),
metadata: CompoundTag::new(),
offset: [0; 3],
entities: None,
}
}
pub fn match_palette_adapt(schem: &Schematic, matching_palette: &Map<String, i32>, ignore_data: bool) -> Vec<i32> {
let mut data: Vec<i32> = Vec::new();
pub fn match_palette_adapt(schem: &SpongeSchematic, matching_palette: &HashMap<String, i32>, ignore_data: bool) -> Vec<i32> {
let mut data = Vec::with_capacity(schem.block_data.len());
let reverse_palette = create_reverse_palette(schem);
for x in &schem.block_data {
for x in schem.block_data.as_slice().iter() {
let blockname = reverse_palette[*x as usize];
let blockname = if ignore_data { normalize_data(blockname, ignore_data) } else { blockname };
let block_id = match matching_palette.get(&*blockname) {
@ -81,10 +81,10 @@ pub fn match_palette_adapt(schem: &Schematic, matching_palette: &Map<String, i32
}
pub fn match_palette(
schem: &Schematic,
pattern: &Schematic,
schem: &SpongeSchematic,
pattern: &SpongeSchematic,
ignore_data: bool,
) -> Schematic {
) -> SpongeSchematic {
if ignore_data {
match_palette_internal(&strip_data(schem), &strip_data(pattern), ignore_data)
} else {
@ -93,24 +93,23 @@ pub fn match_palette(
}
fn match_palette_internal(
schem: &Schematic,
pattern: &Schematic,
schem: &SpongeSchematic,
pattern: &SpongeSchematic,
ignore_data: bool,
) -> Schematic {
) -> SpongeSchematic {
let data_pattern: Vec<i32> = match_palette_adapt(&pattern, &schem.palette, ignore_data);
Schematic {
version: pattern.version.clone(),
data_version: pattern.data_version.clone(),
SpongeSchematic {
data_version: 0,
palette: schem.palette.clone(),
palette_max: schem.palette_max,
block_data: data_pattern,
block_entities: pattern.block_entities.clone(),
height: pattern.height.clone(),
length: pattern.length.clone(),
width: pattern.width.clone(),
metadata: pattern.metadata.clone(),
offset: pattern.offset.clone(),
height: pattern.height,
length: pattern.length,
width: pattern.width,
metadata: CompoundTag::new(),
offset: [0; 3],
entities: None,
}
}

87
schemsearch-lib/src/search.rs Executable file
View File

@ -0,0 +1,87 @@
use math::round::ceil;
use schemsearch_files::SpongeSchematic;
use crate::{Match, SearchBehavior};
use crate::pattern_mapper::{match_palette, match_palette_adapt};
/// Searches for all placements of `pattern_schem` inside `schem`.
///
/// Returns a `Match` (corner position and matching percentage) for every
/// placement whose share of matching blocks reaches
/// `search_behavior.threshold`. Entities are never compared; the behavior
/// flags control how air blocks and block-state data are treated.
pub fn search(
    schem: SpongeSchematic,
    pattern_schem: &SpongeSchematic,
    search_behavior: SearchBehavior,
) -> Vec<Match> {
    // The pattern cannot fit into the schematic at all.
    if schem.width < pattern_schem.width || schem.height < pattern_schem.height || schem.length < pattern_schem.length {
        return Vec::new();
    }
    // The pattern uses more block types than the schematic contains.
    if pattern_schem.palette.len() > schem.palette.len() {
        return Vec::new();
    }

    // Re-map the pattern palette onto the schematic palette so ids compare directly.
    let pattern_schem = match_palette(&schem, &pattern_schem, search_behavior.ignore_block_data);

    let mut matches: Vec<Match> = Vec::with_capacity(4);

    let pattern_data = pattern_schem.block_data.as_ptr();

    let schem_data = if search_behavior.ignore_block_data {
        match_palette_adapt(&schem, &pattern_schem.palette, search_behavior.ignore_block_data)
    } else {
        schem.block_data
    };
    let schem_data = schem_data.as_ptr();

    // -1 never occurs as a palette id, so it disables the air special-cases.
    let air_id = if search_behavior.ignore_air || search_behavior.air_as_any { pattern_schem.palette.get("minecraft:air").unwrap_or(&-1) } else { &-1 };

    let pattern_blocks = pattern_schem.block_data.len() as f32;
    let i_pattern_blocks = pattern_blocks as i32;

    let pattern_width = pattern_schem.width as usize;
    let pattern_height = pattern_schem.height as usize;
    let pattern_length = pattern_schem.length as usize;

    let schem_width = schem.width as usize;
    let schem_height = schem.height as usize;
    let schem_length = schem.length as usize;

    // One more than the number of tolerated mismatches: a placement matches
    // iff not_matching <= floor(blocks * (1 - threshold)), the same rule as
    // `matching_percent >= threshold`. Bug fix: the previous
    // `ceil(blocks * (1 - threshold))` was 0 for threshold == 1.0 (rejecting
    // even perfect matches, since `0 < 0` is false) and off by one whenever
    // the product was an exact integer.
    let skip_amount = (pattern_blocks * (1.0 - search_behavior.threshold)).floor() as i32 + 1;

    // The subtractions below cannot underflow thanks to the size check above.
    for y in 0..=schem_height - pattern_height {
        for z in 0..=schem_length - pattern_length {
            for x in 0..=schem_width - pattern_width {
                let mut not_matching = 0;
                'outer:
                for j in 0..pattern_height {
                    for k in 0..pattern_length {
                        for i in 0..pattern_width {
                            let index = (x + i) + schem_width * ((z + k) + (y + j) * schem_length);
                            let pattern_index = i + pattern_width * (k + j * pattern_length);
                            // SAFETY: x+i < schem_width, z+k < schem_length and
                            // y+j < schem_height by the loop bounds, so both
                            // flat indices are within their buffers.
                            let data = unsafe { *schem_data.add(index) };
                            let pattern_data = unsafe { *pattern_data.add(pattern_index) };
                            // Bug fix: a cell counts as matching when the
                            // schematic block IS air (ignore_air) or the
                            // pattern block IS air (air_as_any); the previous
                            // `!=` comparisons had this exactly inverted.
                            if (search_behavior.ignore_air && data == *air_id) || (search_behavior.air_as_any && pattern_data == *air_id) {
                                continue;
                            }
                            if data != pattern_data {
                                not_matching += 1;
                                // Too many mismatches to reach the threshold.
                                if not_matching >= skip_amount {
                                    break 'outer;
                                }
                            }
                        }
                    }
                }
                if not_matching < skip_amount {
                    matches.push(Match {
                        x: x as u16,
                        y: y as u16,
                        z: z as u16,
                        percent: (i_pattern_blocks - not_matching) as f32 / pattern_blocks,
                    });
                }
            }
        }
    }
    matches
}

View File

@ -1,13 +1,13 @@
[package]
name = "schemsearch-sql"
version = "0.1.1"
version = "0.1.3"
edition = "2021"
license = "AGPL-3.0-or-later"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
sqlx = { version = "0.6", features = [ "runtime-async-std-native-tls" , "mysql" ] }
sqlx = { version = "0.8", features = [ "runtime-async-std-native-tls" , "mysql" ] }
schemsearch-lib = { path = "../schemsearch-lib" }
schemsearch-files = { path = "../schemsearch-files" }