66 Commits

Author SHA1 Message Date
a64f24eb58 Fix block data key in read_blocks function
Updated the `read_blocks` function to retrieve `Data` instead of `BlockData` from the NBT structure. This resolves potential mismatches and ensures compatibility with the expected key in the NBT schema.
2025-05-11 19:51:49 +02:00
3f22799f7b Enhance time! macro for conditional compilation
The `time!` macro now only logs execution time in debug mode, avoiding unnecessary overhead in release builds. Added `CommandQueueProperties` and `Context` imports to `schemsearch-ocl-matcher` for potential future functionality.
2025-05-11 19:48:12 +02:00
881989a7bc Refactor NBT handling for version 3 schematics
Update logic to process "Schematic" key in NBT for version 3, ensuring compatibility with nested tags. Adjust function signatures to pass CompoundTag references, optimizing data handling and reducing unnecessary clones.
2025-05-04 20:26:26 +02:00
b04c01e737 Refactor code structure and improve performance by optimizing OpenCL kernel and adding timing macros; update Cargo.toml for release profile settings; enhance main.rs and sinks.rs for better readability and organization. 2025-04-11 16:04:10 +02:00
c554b1f164 Rename use_cpu to opencl and adjust logic accordingly. 2025-04-09 18:58:55 +02:00
c30c10e494 Enable lazy initialization for OpenCL availability check 2025-04-09 18:38:34 +02:00
f5286f7aec Enable lazy initialization for OpenCL availability check
Replaced direct calls to `ocl_available()` with a `OnceLock` for thread-safe and efficient lazy initialization. This avoids redundant checks and improves performance in scenarios where OpenCL is not repeatedly required.
2025-04-09 18:34:32 +02:00
8befbf4c7f Add CPU matching option and improve default behavior handling 2025-04-09 18:32:16 +02:00
35726def3e Add CPU matching option and improve default behavior handling 2025-04-09 18:30:51 +02:00
1d3d5b3e6e **Optimize ProQue initialization with OnceLock**
Replaced repeated ProQue initialization with a static OnceLock to ensure efficient and thread-safe reuse. This minimizes the overhead of recreating the ProQue instance while maintaining correct behavior by dynamically updating its dimensions.
2025-04-09 14:39:14 +02:00
47bbf25ac7 Remove deprecated crates and introduce OpenCL matcher integration 2025-04-09 13:41:50 +02:00
2a584e878f Fixing... 2024-04-27 22:30:29 +02:00
33f5fe03fe Merge pull request #11 from Chaoscaot/add-invalid-nbt-arg
🔧 Add invalid_nbt flag.
2024-04-27 21:55:13 +02:00
0e6f2c3f78 🔧 Add invalid_nbt flag. 2024-04-27 21:27:42 +02:00
82108d9e36 🛠️ Fix incorrect CSV format in OutputFormat::CSV. (#10) 2024-04-27 20:19:10 +02:00
d20940f89b Improve Performance 2023-08-20 15:37:23 +02:00
e3e6e9f759 Improve Performance 2023-08-09 09:22:24 +02:00
ccae2ba393 Merge pull request #9 from Chaoscaot/dependabot/cargo/sqlx-0.7
Update sqlx requirement from 0.6 to 0.7
2023-07-11 20:48:13 +02:00
6c6c95bedd Update sqlx requirement from 0.6 to 0.7
Updates the requirements on [sqlx](https://github.com/launchbadge/sqlx) to permit the latest version.
- [Changelog](https://github.com/launchbadge/sqlx/blob/main/CHANGELOG.md)
- [Commits](https://github.com/launchbadge/sqlx/compare/v0.6.0...v0.7.0)

---
updated-dependencies:
- dependency-name: sqlx
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>
2023-07-10 10:49:15 +00:00
582079c80d Bump Version 2023-05-23 20:10:41 +02:00
e25aeab065 Fix Broken Schematics Loading 2023-05-23 20:07:23 +02:00
aee3a80267 Reduce FLOPs 2023-05-01 11:32:40 +02:00
5107e04497 Update README.md 2023-04-28 00:28:34 +02:00
a357da2ce8 Fix Tests 2023-04-24 22:52:03 +02:00
eb84adb4a3 Reduce Boilerplate 2023-04-24 19:04:44 +02:00
25c4e97f71 Merge branch 'chaoscaot/support_sponge_v1_v3' 2023-04-23 12:19:22 +02:00
7d9e7f28a9 Fix SQL 2023-04-23 12:17:59 +02:00
ef1f8ed38f Merge pull request #8 from Chaoscaot/chaoscaot/support_sponge_v1_v3
Add Support for Sponge Schematic V1 and V3
2023-04-22 21:55:51 +02:00
4671f38591 Bump Version 2023-04-22 21:55:08 +02:00
5cff84538d Fix Performance 2023-04-22 21:29:18 +02:00
9a0b0535c6 remove Serde 2023-04-22 21:03:00 +02:00
a47c2f44bd Something™️ 2023-04-22 16:39:21 +02:00
246927d840 idk what im doing 2023-04-14 17:56:28 +02:00
d1a01dc0c1 Faster but not working 2023-04-13 23:49:39 +02:00
e03a805bdb Something Working :D 2023-04-13 23:16:12 +02:00
9cca860db3 Some new Ideas 2023-04-13 16:16:02 +02:00
80f5191ae8 Merge branch 'master' into chaoscaot/support_sponge_v1_v3 2023-04-13 14:33:59 +02:00
3f20cbc17f Create CODE_OF_CONDUCT.md 2023-04-13 00:25:42 +02:00
733aaa9e72 Update dependabot.yml 2023-04-13 00:21:24 +02:00
14866df17d Create dependabot.yml 2023-04-13 00:20:53 +02:00
00e3d6fd0f Fix Cache 2023-04-05 13:07:14 +02:00
fb8f935617 Fix Cache and Bump Version 2023-04-05 13:05:15 +02:00
2a112ac49c Add Output Limit 2023-04-05 02:43:28 +02:00
e7c1fd1ef7 Fixing Something? 2023-04-05 00:33:21 +02:00
80eeaad5d5 Add output for machines 2023-04-04 22:38:02 +02:00
64158cf45b Remove Timer from Progressbar 2023-04-04 21:44:43 +02:00
e4b26755ea Revert "Print Progressbar to stdout"
This reverts commit 5607dcc72c.
2023-04-04 17:34:12 +02:00
5607dcc72c Print Progressbar to stdout 2023-04-04 17:29:04 +02:00
5c9bcfc2ec Add SQL to Makefile 2023-04-04 16:31:48 +02:00
a1b5449f06 Some basic tests and basic impls 2023-04-04 12:07:33 +02:00
1df33249c4 Add Makefile for easier building 2023-04-04 00:36:40 +02:00
ef2755115c Fix tests 2023-04-01 11:14:44 +02:00
b32aac0aba Fix naming and Tests 2023-04-01 11:08:57 +02:00
a9a3e70aef Update Roadmap 2023-04-01 11:07:12 +02:00
c477a52f92 Slowdown ProgressBar and add Stderr as output 2023-04-01 11:02:49 +02:00
818de6be47 Abstractions 2023-04-01 10:30:25 +02:00
8f15b42146 Add Issue Templates 2023-03-21 18:31:50 +01:00
b8d912881d Fix SQL-Interface 2023-03-19 21:18:40 +01:00
02404792a5 Bump Version 2023-03-19 11:59:39 +01:00
59272ed3e7 Add Complex Output Patterns 2023-03-19 11:57:22 +01:00
322ba65656 Add Caches 2023-03-18 10:27:34 +01:00
b082d6cd8d Fix Imports 2023-03-16 21:33:23 +01:00
f4bcde73f9 Add Roadmap 2023-03-16 21:09:41 +01:00
0e31714582 Add Roadmap 2023-03-16 21:08:56 +01:00
229c858d9a Optimize Workflows 2023-03-16 20:40:37 +01:00
abf6953172 Optimize Workflows 2023-03-16 20:40:17 +01:00
38 changed files with 1658 additions and 519 deletions

40
.github/ISSUE_TEMPLATE/bug.yml vendored Normal file
View File

@ -0,0 +1,40 @@
name: Bug Report
description: Create a report to fix a bug
labels: [bug]
title: "[BUG] <title>"
body:
- type: textarea
id: description
attributes:
label: Description
description: A clear and concise description of what the bug is.
validations:
required: true
- type: textarea
id: reproduction
attributes:
label: Reproduction
description: Steps to reproduce the behavior.
validations:
required: true
- type: textarea
id: expected-behavior
attributes:
label: Expected Behavior
description: A clear and concise description of what you expected to happen.
validations:
required: true
- type: textarea
id: actual-behavior
attributes:
label: Actual Behavior
description: A clear and concise description of what actually happened.
validations:
required: true
- type: textarea
id: additional-context
attributes:
label: Additional Context
description: Add any other context about the problem here.
validations:
required: false

35
.github/ISSUE_TEMPLATE/feature.yml vendored Normal file
View File

@ -0,0 +1,35 @@
name: Feature Request
description: Suggest an idea for this project
title: "[FEATURE] <title>"
labels: [enhancement]
body:
- type: textarea
id: description
attributes:
label: Description
description: A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
validations:
required: true
- type: textarea
id: solution
attributes:
label: Proposed Solution
description: A clear and concise description of what you want to happen.
validations:
required: true
- type: textarea
id: alternatives
attributes:
label: Alternatives
description: A clear and concise description of any alternative solutions or features you've considered.
validations:
required: false
- type: textarea
id: additional-context
attributes:
label: Additional Context
description: Add any other context or screenshots about the feature request here.
validations:
required: false

6
.github/dependabot.yml vendored Normal file
View File

@ -0,0 +1,6 @@
version: 2
updates:
- package-ecosystem: "cargo"
directory: "/"
schedule:
interval: "weekly"

View File

@ -16,18 +16,41 @@ jobs:
steps:
- uses: actions/checkout@v3
- name: Cache Cargo modules
id: cache-cargo
uses: actions/cache@v3
continue-on-error: false
with:
path: |
~/.cargo/registry
~/.cargo/git
key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
restore-keys: |
${{ runner.os }}-cargo-
- name: Build
run: cargo build --verbose
run: make debug
- name: Run tests
run: cargo test --verbose
build-realease:
run: cargo test --verbose -p schemsearch-lib
build-release:
needs:
- build
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Cache Cargo modules
id: cache-cargo
uses: actions/cache@v3
continue-on-error: false
with:
path: |
~/.cargo/registry
~/.cargo/git
key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
restore-keys: |
${{ runner.os }}-cargo-
- name: Build
run: cargo build --verbose --release
run: make
- name: Upload a Build Artifact
uses: actions/upload-artifact@v3.1.2
with:

View File

@ -19,8 +19,19 @@ jobs:
steps:
- uses: actions/checkout@v3
- name: Cache Cargo modules
id: cache-cargo
uses: actions/cache@v3
continue-on-error: false
with:
path: |
~/.cargo/registry
~/.cargo/git
key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
restore-keys: |
${{ runner.os }}-cargo-
- name: Build
run: cargo build --verbose --release
run: make
- name: Create Tarball
if: ${{ matrix.os != 'windows-latest' }}
run: tar -czvf schemsearch-cli-${{ matrix.os }}.tar.gz -C target/release schemsearch-cli

128
CODE_OF_CONDUCT.md Normal file
View File

@ -0,0 +1,128 @@
# Contributor Covenant Code of Conduct
## Our Pledge
We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.
We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.
## Our Standards
Examples of behavior that contributes to a positive environment for our
community include:
* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
* Focusing on what is best not just for us as individuals, but for the
overall community
Examples of unacceptable behavior include:
* The use of sexualized language or imagery, and sexual attention or
advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Enforcement Responsibilities
Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.
Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.
## Scope
This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
chaoscaot@zohomail.eu.
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the
reporter of any incident.
## Enforcement Guidelines
Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:
### 1. Correction
**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.
**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.
### 2. Warning
**Community Impact**: A violation through a single incident or series
of actions.
**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or
permanent ban.
### 3. Temporary Ban
**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.
**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.
### 4. Permanent Ban
**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within
the community.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.0, available at
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
Community Impact Guidelines were inspired by [Mozilla's code of conduct
enforcement ladder](https://github.com/mozilla/diversity).
[homepage]: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see the FAQ at
https://www.contributor-covenant.org/faq. Translations are available at
https://www.contributor-covenant.org/translations.

7
Cargo.toml Normal file → Executable file
View File

@ -2,11 +2,10 @@
members = [
"schemsearch-cli",
"schemsearch-lib",
"schemsearch_faster",
"schemsearch-files",
"schemsearch-sql",
"schemsearch-java"
]
"schemsearch-ocl-matcher"]
resolver = "2"
[profile.small]
inherits = "release"
@ -16,4 +15,4 @@ opt-level = "z"
codegen-units = 1
[profile.release]
lto = true
debug = true

28
Makefile Normal file
View File

@ -0,0 +1,28 @@
default:
@echo "Building (Release)...";
cargo rustc --release --color=always -p schemsearch-cli -- -C target-feature=+avx2
sql:
@echo "Building (Release)...";
cargo rustc --release --color=always -p schemsearch-cli --features sql -- -C target-feature=+avx2
debug:
@echo "Building (Debug)...";
cargo build -p schemsearch-cli
install: default
@echo "Installing...";
install -Dm755 target/release/schemsearch-cli /usr/bin/schemsearch
uninstall:
@echo "Uninstalling...";
rm -f /usr/bin/schemsearch
java:
@echo "Building Java...";
@echo "WARNING: This is WORK IN PROGRESS!";
javac SchemSearch.java
clean:
@echo "Cleaning...";
cargo clean

View File

@ -1,14 +1,14 @@
# schemsearch
### A *simple* CLI tool to search in Sponge V2 Schematic files
### A *simple* CLI tool to search in Sponge Schematic files
---
## WARNING: This is a work in progress and is really simple right now. It will be improved in the future.
| Feature | Status |
|------------------------|--------|
|---------------------------|--------|
| Block search | ✅ |
| Block data less search | ✅ |
| Tile entities search | ❌ |
| Tile entities data search | ❌ |
| Entities search | ❌ |
---
@ -40,6 +40,16 @@ schemsearch-cli --help
---
## Roadmap
A list of features that are planned to be implemented in the future. In order of priority.
- [ ] Use AVX2 for faster search
- [ ] Tile entities data search
- [ ] Entities search
- [ ] McEdit Schematic support
---
## Building
This project is build using Rust for the CLI and library. It can be built using Cargo.
```bash

7
schemsearch-cli/Cargo.toml Normal file → Executable file
View File

@ -1,21 +1,24 @@
[package]
name = "schemsearch-cli"
version = "0.1.0"
version = "0.1.7"
edition = "2021"
license = "AGPL-3.0-or-later"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
schemsearch-common = { path = "../schemsearch-common" }
schemsearch-lib = { path = "../schemsearch-lib" }
schemsearch-files = { path = "../schemsearch-files" }
schemsearch-sql = { path = "../schemsearch-sql", optional = true }
clap = { version = "4.1.8", features = ["cargo"] }
futures = { version = "0.3", optional = true }
sqlx = { version = "0.6", features = [ "runtime-async-std-native-tls" , "mysql" ], optional = true }
sqlx = { version = "0.7", features = [ "runtime-async-std-native-tls" , "mysql" ], optional = true }
rayon = "1.7.0"
indicatif = { version = "0.17.3", features = ["rayon"] }
serde = "1.0.157"
serde_json = "1.0.94"
[features]
sql = ["dep:schemsearch-sql", "dep:futures", "dep:sqlx"]

View File

@ -0,0 +1,29 @@
use serde::{Deserialize, Serialize};
use schemsearch_common::{Match, SearchBehavior};
#[derive(Serialize, Deserialize, Debug)]
#[serde(tag = "event")]
pub enum JsonEvent {
Found(FoundEvent),
Init(InitEvent),
End(EndEvent),
}
#[derive(Serialize, Deserialize, Debug)]
pub struct FoundEvent {
pub name: String,
#[serde(flatten, rename = "match")]
pub match_: Match,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct InitEvent {
pub total: u32,
pub search_behavior: SearchBehavior,
pub start_time: u128,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct EndEvent {
pub end_time: u128,
}

327
schemsearch-cli/src/main.rs Normal file → Executable file
View File

@ -15,29 +15,35 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
mod json_output;
mod sinks;
mod stderr;
mod types;
use std::fmt::Debug;
use std::fs::File;
use std::io;
use std::io::{BufWriter, Write};
use clap::{command, Arg, ArgAction, ValueHint};
use schemsearch_files::Schematic;
use std::path::PathBuf;
use crate::sinks::{OutputFormat, OutputSink};
use crate::stderr::MaschineStdErr;
#[cfg(feature = "sql")]
use crate::types::SqlSchematicSupplier;
use crate::types::{PathSchematicSupplier, SchematicSupplier, SchematicSupplierType};
use clap::error::ErrorKind;
use schemsearch_lib::{search, SearchBehavior};
use crate::types::{PathSchematicSupplier, SchematicSupplierType};
use clap::{command, Arg, ArgAction, ValueHint};
#[cfg(feature = "sql")]
use futures::executor::block_on;
use indicatif::*;
use rayon::prelude::*;
use rayon::ThreadPoolBuilder;
use schemsearch_common::{Match, SearchBehavior};
use schemsearch_files::SpongeSchematic;
use schemsearch_lib::nbt_search::has_invalid_nbt;
use schemsearch_lib::search::search;
#[cfg(feature = "sql")]
use schemsearch_sql::filter::SchematicFilter;
#[cfg(feature = "sql")]
use schemsearch_sql::load_all_schematics;
#[cfg(feature = "sql")]
use crate::types::SqlSchematicSupplier;
use indicatif::{ProgressBar, ParallelProgressIterator, ProgressStyle};
use std::fmt::Debug;
use std::io::Write;
use std::path::PathBuf;
use std::str::FromStr;
fn main() {
#[allow(unused_mut)]
@ -45,8 +51,8 @@ fn main() {
.arg(
Arg::new("pattern")
.help("The pattern to search for")
.required(true)
.value_hint(ValueHint::FilePath)
.required_unless_present("invalid-nbt")
.action(ArgAction::Set),
)
.arg(
@ -90,22 +96,41 @@ fn main() {
.long("air-as-any")
.action(ArgAction::SetTrue),
)
.arg(
Arg::new("invalid-nbt")
.help("Search for Schematics with Invalid or missing NBT data")
.short('I')
.long("invalid-nbt")
.action(ArgAction::SetTrue),
)
.arg(
Arg::new("output")
.help("The output format")
.help("The output format and path [Format:Path] available formats: text, json, csv; available paths: std, err, (file path)")
.short('o')
.long("output")
.action(ArgAction::Append)
.default_value("std")
.value_parser(["std_csv", "file_csv", "std", "file"]),
)
.arg(
Arg::new("output-file")
.help("The output file")
.short('O')
.long("output-file")
.value_hint(ValueHint::FilePath)
.action(ArgAction::Append)
.default_value("text:std")
.value_parser(|s: &str| {
let mut split = s.splitn(2, ':');
let format = match split.next() {
None => return Err("No format specified".to_string()),
Some(x) => x
};
let path = match split.next() {
None => return Err("No path specified".to_string()),
Some(x) => x
};
let format = match OutputFormat::from_str(format) {
Ok(x) => x,
Err(e) => return Err(e.to_string()),
};
let path = match OutputSink::from_str(path) {
Ok(x) => x,
Err(e) => return Err(e.to_string()),
};
Ok((format, path))
}),
)
.arg(
Arg::new("threshold")
@ -118,13 +143,38 @@ fn main() {
)
.arg(
Arg::new("threads")
.help("The number of threads to use [0 = Available Threads]")
.help("The number of threads to use [0 = all Available Threads]")
.short('T')
.long("threads")
.action(ArgAction::Set)
.default_value("0")
.value_parser(|s: &str| s.parse::<usize>().map_err(|e| e.to_string())),
)
.arg(
Arg::new("machine")
.help("Output for machines")
.short('m')
.long("machine")
.action(ArgAction::Set)
.default_value("0")
.value_parser(|s: &str| s.parse::<u16>().map_err(|e| e.to_string()))
)
.arg(
Arg::new("limit")
.help("The maximum number of matches to return [0 = Unlimited]")
.short('l')
.long("limit")
.action(ArgAction::Set)
.default_value("50")
.value_parser(|s: &str| s.parse::<usize>().map_err(|e| e.to_string())),
)
.arg(
Arg::new("opencl")
.help("Use OpenCL Checker")
.short('c')
.long("opencl")
.action(ArgAction::SetTrue),
)
.about("Searches for a pattern in a schematic")
.bin_name("schemsearch");
@ -169,19 +219,30 @@ fn main() {
ignore_air: matches.get_flag("ignore-air"),
air_as_any: matches.get_flag("air-as-any"),
ignore_entities: matches.get_flag("ignore-entities"),
threshold: *matches.get_one::<f32>("threshold").expect("Couldn't get threshold"),
threshold: *matches
.get_one::<f32>("threshold")
.expect("Couldn't get threshold"),
invalid_nbt: matches.get_flag("invalid-nbt"),
opencl: matches.get_flag("opencl"),
};
let pattern = match Schematic::load(&PathBuf::from(matches.get_one::<String>("pattern").unwrap())) {
Ok(x) => x,
let pattern = match matches.get_one::<String>("pattern") {
Some(p) => match SpongeSchematic::load(&PathBuf::from(p)) {
Ok(x) => Some(x),
Err(e) => {
cmd.error(ErrorKind::Io, format!("Error while loading Pattern: {}", e.to_string())).exit();
cmd.error(
ErrorKind::Io,
format!("Error while loading Pattern: {}", e.to_string()),
)
.exit();
}
},
None => None,
};
let mut schematics: Vec<SchematicSupplierType> = Vec::new();
match matches.get_many::<String>("schematic") {
None => {},
None => {}
Some(x) => {
let paths = x.map(|x| PathBuf::from(x));
for path in paths {
@ -192,12 +253,12 @@ fn main() {
.filter(|x| x.path().is_file())
.filter(|x| x.path().extension().unwrap().to_str().unwrap() == "schem")
.for_each(|x| {
schematics.push(SchematicSupplierType::PATH(Box::new(PathSchematicSupplier {
schematics.push(SchematicSupplierType::PATH(PathSchematicSupplier {
path: x.path(),
})))
}))
});
} else if path.extension().unwrap().to_str().unwrap() == "schem" {
schematics.push(SchematicSupplierType::PATH(Box::new(PathSchematicSupplier { path })));
schematics.push(SchematicSupplierType::PATH(PathSchematicSupplier { path }));
}
}
}
@ -213,128 +274,174 @@ fn main() {
filter = filter.name(x.collect());
}
for schem in block_on(load_all_schematics(filter)) {
schematics.push(SchematicSupplierType::SQL(SqlSchematicSupplier{
node: schem
schematics.push(SchematicSupplierType::SQL(SqlSchematicSupplier {
node: schem,
}))
};
}
}
if schematics.is_empty() {
cmd.error(ErrorKind::MissingRequiredArgument, "No schematics specified").exit();
cmd.error(
ErrorKind::MissingRequiredArgument,
"No schematics specified",
)
.exit();
}
let mut output_std = false;
let mut output_std_csv = false;
let mut output_file_csv = false;
let mut output_file = false;
let output: Vec<&(OutputFormat, OutputSink)> = matches
.get_many::<(OutputFormat, OutputSink)>("output")
.expect("Error")
.collect();
let mut output: Vec<(OutputFormat, Box<dyn Write>)> = output
.into_iter()
.map(|x| (x.0.clone(), x.1.output()))
.collect();
for x in matches.get_many::<String>("output").expect("Couldn't get output") {
match x.as_str() {
"std" => output_std = true,
"std_csv" => output_std_csv = true,
"file_csv" => output_file_csv = true,
"file" => output_file = true,
_ => {}
for x in &mut output {
write!(
x.1,
"{}",
x.0.start(
schematics.len() as u32,
&search_behavior,
start.elapsed().as_millis()
)
)
.unwrap();
}
};
let file: Option<File>;
let mut file_out: Option<BufWriter<File>> = None;
if output_file || output_file_csv {
let output_file_path = match matches.get_one::<String>("output-file") {
None => {
cmd.error(ErrorKind::MissingRequiredArgument, "No output file specified").exit();
}
Some(x) => x
};
ThreadPoolBuilder::new()
.num_threads(
*matches
.get_one::<usize>("threads")
.expect("Could not get threads"),
)
.build_global()
.unwrap();
file = match File::create(output_file_path) {
Ok(x) => Some(x),
Err(e) => {
cmd.error(ErrorKind::Io, format!("Error while creating output file: {}", e.to_string())).exit();
let bar = ProgressBar::new(schematics.len() as u64); // "maschine"
bar.set_style(
ProgressStyle::with_template("[{elapsed}, ETA: {eta}] {wide_bar} {pos}/{len} {per_sec}")
.unwrap(),
);
let term_size = *matches
.get_one::<u16>("machine")
.expect("Could not get machine");
if term_size != 0 {
bar.set_draw_target(ProgressDrawTarget::term_like(Box::new(MaschineStdErr {
size: term_size,
})))
}
};
file_out = Some(BufWriter::new(file.unwrap()));
}
ThreadPoolBuilder::new().num_threads(*matches.get_one::<usize>("threads").expect("Could not get threads")).build_global().unwrap();
let matches: Vec<Result> = schematics.par_iter().progress_with_style(ProgressStyle::with_template("[{elapsed}, ETA: {eta}] {wide_bar} {pos}/{len} {per_sec}").unwrap()).map(|schem| {
match schem {
let max_matching = *matches
.get_one::<usize>("limit")
.expect("Could not get max-matching");
let matches: Vec<SearchResult> = schematics
.par_iter()
.progress_with(bar)
.map(|schem| match schem {
SchematicSupplierType::PATH(schem) => {
let schematic = match load_schem(&schem.path) {
Some(x) => x,
None => return Result {
None => {
return SearchResult {
name: schem.get_name(),
matches: vec![]
matches: Vec::default(),
}
}
};
Result {
name: schem.get_name(),
matches: search(schematic, &pattern, search_behavior)
}
search_in_schem(schematic, pattern.as_ref(), search_behavior, schem)
}
#[cfg(feature = "sql")]
SchematicSupplierType::SQL(schem) => {
match schem.get_schematic() {
SchematicSupplierType::SQL(schem) => match schem.get_schematic() {
Ok(schematic) => {
Result {
name: schem.get_name(),
matches: search(schematic, &pattern, search_behavior)
}
search_in_schem(schematic, pattern.as_ref(), search_behavior, schem)
}
Err(e) => {
if !output_std && !output_std_csv {
println!("Error while loading schematic ({}): {}", schem.get_name(), e.to_string());
}
Result {
eprintln!(
"Error while loading schematic ({}): {}",
schem.get_name(),
e.to_string()
);
SearchResult {
name: schem.get_name(),
matches: vec![]
matches: Vec::default(),
}
}
}
}
}
}).collect();
},
})
.collect();
let stdout = io::stdout();
let mut lock = stdout.lock();
let mut matches_count = 0;
for matching in matches {
'outer: for matching in matches {
let schem_name = matching.name;
let matching = matching.matches;
for x in matching {
if output_std {
writeln!(lock, "Found match in '{}' at x: {}, y: {}, z: {}, % = {}", schem_name, x.0, x.1, x.2, x.3).unwrap();
for out in &mut output {
write!(out.1, "{}", out.0.found_match(&schem_name, x)).unwrap();
}
if output_std_csv {
writeln!(lock, "{},{},{},{},{}", schem_name, x.0, x.1, x.2, x.3).unwrap();
}
if output_file {
writeln!(file_out.as_mut().unwrap(), "Found match in '{}' at x: {}, y: {}, z: {}, % = {}", schem_name, x.0, x.1, x.2, x.3).unwrap();
}
if output_file_csv {
writeln!(file_out.as_mut().unwrap(), "{},{},{},{},{}", schem_name, x.0, x.1, x.2, x.3).unwrap();
matches_count += 1;
if max_matching != 0 && matches_count >= max_matching {
break 'outer;
}
}
}
let end = std::time::Instant::now();
println!("Finished in {:.2}s! Searched in {} Schematics", end.duration_since(start).as_secs_f32(), schematics.len());
for x in &mut output {
write!(x.1, "{}", x.0.end(start.elapsed())).unwrap();
x.1.flush().unwrap();
}
}
fn load_schem(schem_path: &PathBuf) -> Option<Schematic> {
match Schematic::load(schem_path) {
fn search_in_schem(
schematic: SpongeSchematic,
pattern: Option<&SpongeSchematic>,
search_behavior: SearchBehavior,
schem: &impl SchematicSupplier,
) -> SearchResult {
if search_behavior.invalid_nbt {
if has_invalid_nbt(schematic) {
SearchResult {
name: schem.get_name(),
matches: vec![Match {
x: 0,
y: 0,
z: 0,
percent: 1.0,
}],
}
} else {
SearchResult {
name: schem.get_name(),
matches: vec![],
}
}
} else {
SearchResult {
name: schem.get_name(),
matches: search(schematic, pattern.unwrap(), search_behavior),
}
}
}
fn load_schem(schem_path: &PathBuf) -> Option<SpongeSchematic> {
match SpongeSchematic::load(schem_path) {
Ok(x) => Some(x),
Err(e) => {
println!("Error while loading schematic ({}): {}", schem_path.to_str().unwrap(), e.to_string());
println!(
"Error while loading schematic ({}): {}",
schem_path.to_str().unwrap(),
e.to_string()
);
None
}
}
}
#[derive(Debug, Clone)]
struct Result {
struct SearchResult {
name: String,
matches: Vec<(u16, u16, u16, f32)>,
matches: Vec<Match>,
}

109
schemsearch-cli/src/sinks.rs Executable file
View File

@ -0,0 +1,109 @@
use crate::json_output::{EndEvent, FoundEvent, InitEvent, JsonEvent};
use indicatif::HumanDuration;
use schemsearch_common::{Match, SearchBehavior};
use std::fs::File;
use std::io::BufWriter;
use std::io::Write;
use std::str::FromStr;
use std::time::Duration;
#[derive(Debug, Clone)]
pub enum OutputSink {
Stdout,
Stderr,
File(String),
}
#[derive(Debug, Clone)]
pub enum OutputFormat {
Text,
CSV,
JSON,
}
impl FromStr for OutputFormat {
type Err = String;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"text" => Ok(OutputFormat::Text),
"csv" => Ok(OutputFormat::CSV),
"json" => Ok(OutputFormat::JSON),
_ => Err(format!("'{}' is not a valid output format", s)),
}
}
}
impl FromStr for OutputSink {
type Err = String;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"std" => Ok(OutputSink::Stdout),
"err" => Ok(OutputSink::Stderr),
_ => Ok(OutputSink::File(s.to_string())),
}
}
}
impl OutputSink {
pub fn output(&self) -> Box<dyn Write> {
match self {
OutputSink::Stdout => Box::new(std::io::stdout()),
OutputSink::Stderr => Box::new(std::io::stderr()),
OutputSink::File(path) => Box::new(BufWriter::new(File::create(path).unwrap())),
}
}
}
impl OutputFormat {
    /// Formats a single match for this output format.
    ///
    /// `name` is the schematic's display name (now `&str` instead of the
    /// `&String` anti-pattern — existing `&String` call sites still work via
    /// deref coercion); `pos` carries the match coordinates and similarity.
    pub fn found_match(&self, name: &str, pos: Match) -> String {
        match self {
            OutputFormat::Text => format!(
                "Found match in '{}' at x: {}, y: {}, z: {}, % = {}\n",
                name, pos.x, pos.y, pos.z, pos.percent
            ),
            OutputFormat::CSV => {
                format!("{},{},{},{},{}\n", name, pos.x, pos.y, pos.z, pos.percent)
            }
            OutputFormat::JSON => format!(
                "{}\n",
                serde_json::to_string(&JsonEvent::Found(FoundEvent {
                    name: name.to_owned(),
                    match_: pos,
                }))
                .unwrap()
            ),
        }
    }

    /// Emits the stream header: a banner (Text), the CSV column header, or a
    /// JSON init event echoing the search configuration and start time.
    pub fn start(&self, total: u32, search_behavior: &SearchBehavior, start_time: u128) -> String {
        match self {
            OutputFormat::Text => format!("Starting search in {} schematics\n", total),
            OutputFormat::CSV => "Name,X,Y,Z,Percent\n".to_owned(),
            OutputFormat::JSON => format!(
                "{}\n",
                serde_json::to_string(&JsonEvent::Init(InitEvent {
                    total,
                    search_behavior: search_behavior.clone(),
                    start_time,
                }))
                .unwrap()
            ),
        }
    }

    /// Emits the stream trailer with the total elapsed time.
    /// NOTE(review): the CSV arm writes a Debug-formatted `Duration` as a raw
    /// line, which is not valid CSV for the declared columns — confirm
    /// downstream consumers expect this.
    pub fn end(&self, end_time: Duration) -> String {
        match self {
            OutputFormat::Text => format!("Search complete in {:?}\n", end_time),
            OutputFormat::CSV => format!("{:?}\n", end_time),
            OutputFormat::JSON => format!(
                "{}\n",
                serde_json::to_string(&JsonEvent::End(EndEvent {
                    end_time: end_time.as_millis()
                }))
                .unwrap()
            ),
        }
    }
}

View File

@ -0,0 +1,44 @@
use std::fmt::Debug;
use std::io::Write;
use indicatif::TermLike;
/// A minimal `TermLike` backend with a fixed, caller-supplied width that
/// writes straight to stderr. All cursor-movement operations are no-ops,
/// which makes it safe for non-interactive environments (CI logs, pipes)
/// where a real terminal is unavailable.
#[derive(Debug)]
pub struct MaschineStdErr { pub(crate) size: u16}

impl TermLike for MaschineStdErr {
    /// Reports the fixed width configured at construction.
    fn width(&self) -> u16 {
        self.size
    }

    // A plain stderr stream has no cursor control, so movement is a no-op.
    fn move_cursor_up(&self, _: usize) -> std::io::Result<()> {
        Ok(())
    }

    fn move_cursor_down(&self, _: usize) -> std::io::Result<()> {
        Ok(())
    }

    fn move_cursor_right(&self, _: usize) -> std::io::Result<()> {
        Ok(())
    }

    fn move_cursor_left(&self, _: usize) -> std::io::Result<()> {
        Ok(())
    }

    /// Writes `s` plus a trailing newline to stderr.
    fn write_line(&self, s: &str) -> std::io::Result<()> {
        writeln!(std::io::stderr(), "{}", s)
    }

    /// Writes `s` to stderr without a newline.
    fn write_str(&self, s: &str) -> std::io::Result<()> {
        write!(std::io::stderr(), "{}", s)
    }

    // Nothing to clear on an append-only stream.
    fn clear_line(&self) -> std::io::Result<()> {
        Ok(())
    }

    fn flush(&self) -> std::io::Result<()> {
        std::io::stderr().flush()
    }
}

28
schemsearch-cli/src/types.rs Normal file → Executable file
View File

@ -15,25 +15,32 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
#[cfg(feature = "sql")]
use std::io::Cursor;
use std::path::PathBuf;
#[cfg(feature = "sql")]
use futures::executor::block_on;
use schemsearch_files::Schematic;
#[cfg(feature = "sql")]
use schemsearch_files::SpongeSchematic;
#[cfg(feature = "sql")]
use schemsearch_sql::{load_schemdata, SchematicNode};
pub enum SchematicSupplierType {
PATH(Box<PathSchematicSupplier>),
PATH(PathSchematicSupplier),
#[cfg(feature = "sql")]
SQL(SqlSchematicSupplier),
}
pub trait SchematicSupplier {
fn get_name(&self) -> String;
}
pub struct PathSchematicSupplier {
pub path: PathBuf,
}
impl PathSchematicSupplier {
pub fn get_name(&self) -> String {
impl SchematicSupplier for PathSchematicSupplier {
fn get_name(&self) -> String {
self.path.file_stem().unwrap().to_str().unwrap().to_string()
}
}
@ -45,12 +52,17 @@ pub struct SqlSchematicSupplier {
#[cfg(feature = "sql")]
impl SqlSchematicSupplier {
pub fn get_schematic(&self) -> Result<Schematic, String> {
let schemdata = block_on(load_schemdata(self.node.id));
Schematic::load_data(schemdata.as_slice())
pub fn get_schematic(&self) -> Result<SpongeSchematic, String> {
let mut schemdata = block_on(load_schemdata(self.node.id));
SpongeSchematic::load_data(&mut Cursor::new(schemdata.as_mut_slice()))
}
}
pub fn get_name(&self) -> String {
#[cfg(feature = "sql")]
impl SchematicSupplier for SqlSchematicSupplier {
fn get_name(&self) -> String {
format!("{} ({})", self.node.name, self.node.id)
}
}

View File

@ -0,0 +1,7 @@
[package]
name = "schemsearch-common"
version = "0.1.0"
edition = "2021"
[dependencies]
serde = { version = "1.0.160", features = ["derive"] }

View File

@ -0,0 +1,56 @@
use serde::{Deserialize, Serialize};
/// Configuration shared by the matchers describing how strictly a pattern
/// must match a schematic.
#[derive(Debug, Clone, Copy, Deserialize, Serialize)]
pub struct SearchBehavior {
    /// Compare block types only, stripping block-state data (the `[...]`
    /// suffix of palette keys).
    pub ignore_block_data: bool,
    /// Skip comparison of block entities (chests, signs, ...).
    pub ignore_block_entities: bool,
    /// Count air blocks in the searched schematic as matching anything.
    pub ignore_air: bool,
    /// Treat air blocks in the pattern as wildcards.
    pub air_as_any: bool,
    /// Skip comparison of entities.
    pub ignore_entities: bool,
    /// Minimum fraction of matching blocks (0.0–1.0) for a position to be
    /// reported as a match.
    pub threshold: f32,
    // NOTE(review): presumably restricts/flags schematics whose NBT data is
    // invalid (see nbt_search::has_invalid_nbt) — confirm CLI semantics.
    pub invalid_nbt: bool,
    /// Use the OpenCL matcher instead of the CPU matcher.
    pub opencl: bool,
}
impl Default for SearchBehavior {
fn default() -> Self {
SearchBehavior {
ignore_block_data: false,
ignore_block_entities: false,
ignore_air: false,
air_as_any: false,
ignore_entities: false,
threshold: 0.9,
invalid_nbt: false,
opencl: false,
}
}
}
/// A single pattern occurrence inside a searched schematic.
#[derive(Debug, Clone, Copy, Default, Deserialize, Serialize)]
pub struct Match {
    /// X offset of the pattern's origin corner within the schematic.
    pub x: u16,
    /// Y offset of the pattern's origin corner.
    pub y: u16,
    /// Z offset of the pattern's origin corner.
    pub z: u16,
    /// Fraction of pattern blocks that matched, in [0, 1].
    pub percent: f32,
}
/// Evaluates `$body` and, in debug builds only, prints
/// "`$name` took <duration>" to stdout. Release builds expand to the bare
/// body, so the timing adds zero overhead in production.
#[macro_export]
macro_rules! time {
    ($name:ident, $body:block) => {
        {
            // Debug builds: measure wall-clock time around the body.
            #[cfg(debug_assertions)]
            {
                let start = std::time::Instant::now();
                let result = $body;
                let duration = start.elapsed();
                println!("{} took {:?}", stringify!($name), duration);
                result
            }
            // Release builds: just the body, no instrumentation.
            #[cfg(not(debug_assertions))]
            {
                $body
            }
        }
    };
}

View File

@ -1,10 +1,11 @@
[package]
name = "schemsearch-files"
version = "0.1.0"
version = "0.1.5"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
hematite-nbt = "0.5.2"
serde = "1.0.152"
flate2 = "1.0.25"
named-binary-tag = "0.6"

View File

@ -15,81 +15,176 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use std::collections::hash_map::HashMap;
use std::io::Read;
use std::path::PathBuf;
use nbt::{Map, Value};
use serde::{Deserialize, Deserializer, Serialize};
use nbt::{CompoundTag, Tag};
#[derive(Serialize, Deserialize, Debug)]
pub struct Schematic {
#[serde(rename = "Version")]
pub version: i32,
#[serde(rename = "DataVersion")]
#[derive(Clone, Debug)]
pub struct SpongeSchematic {
pub data_version: i32,
#[serde(rename = "Metadata")]
pub metadata: Map<String, Value>,
#[serde(rename = "Width")]
pub metadata: CompoundTag,
pub width: u16,
#[serde(rename = "Height")]
pub height: u16,
#[serde(rename = "Length")]
pub length: u16,
#[serde(rename = "Offset")]
pub offset: [i32; 3],
#[serde(rename = "PaletteMax")]
pub palette_max: i32,
#[serde(rename = "Palette")]
pub palette: Map<String, i32>,
#[serde(rename = "BlockData", deserialize_with = "read_blockdata")]
pub palette: HashMap<String, i32>,
pub block_data: Vec<i32>,
#[serde(rename = "BlockEntities")]
pub block_entities: Vec<BlockEntity>,
#[serde(rename = "Entities")]
pub entities: Option<Vec<Entity>>,
}
fn read_blockdata<'de, D>(deserializer: D) -> Result<Vec<i32>, D::Error>
where
D: Deserializer<'de>,
{
let s: Vec<i8> = Deserialize::deserialize(deserializer)?;
Ok(read_varint_array(&s))
#[derive(Clone, Debug)]
pub struct BlockContainer {
pub palette: HashMap<String, i32>,
pub block_data: Vec<i32>,
pub block_entities: Vec<BlockEntity>,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
#[derive(Debug, Clone)]
pub struct BlockEntity {
#[serde(rename = "Id")]
pub id: String,
#[serde(rename = "Pos")]
pub pos: [i32; 3],
}
#[derive(Serialize, Deserialize, Debug, Clone)]
#[derive(Debug, Clone)]
pub struct BlockEntityV3 {
pub id: String,
pub pos: [i32; 3],
pub data: HashMap<String, Tag>,
}
#[derive(Debug, Clone)]
pub struct Entity {
#[serde(rename = "Id")]
pub id: String,
#[serde(rename = "Pos")]
pub pos: [i32; 3],
}
impl Schematic {
pub fn load_data<R>(data: R) -> Result<Schematic, String> where R: Read {
let schematic: Schematic = match nbt::from_gzip_reader(data) {
Ok(schem) => schem,
Err(e) => return Err(format!("Failed to parse schematic: {}", e))
impl SpongeSchematic {
pub fn load_data<R>(data: &mut R) -> Result<SpongeSchematic, String> where R: Read {
let nbt: CompoundTag = nbt::decode::read_gzip_compound_tag(data).map_err(|e| e.to_string())?;
let version = nbt.get_i32("Version").unwrap_or_else(|_| {
return if nbt.contains_key("Schematic") {
3
} else if nbt.contains_key("BlockEntities") {
2
} else if nbt.contains_key("TileEntities") {
1
} else {
-1
};
Ok(schematic)
});
match version {
1 => SpongeSchematic::from_nbt_1(nbt),
2 => SpongeSchematic::from_nbt_2(nbt),
3 => SpongeSchematic::from_nbt_3(nbt.get_compound_tag("Schematic").map_err(|e| e.to_string())?),
_ => Err("Invalid schematic: Unknown Version".to_string()),
}
}
pub fn load(path: &PathBuf) -> Result<Schematic, String> {
let file = match std::fs::File::open(path) {
Ok(x) => x,
Err(_) => return Err(format!("Failed to open file: {}", path.to_str().unwrap()))
};
Schematic::load_data(file)
pub fn load(path: &PathBuf) -> Result<SpongeSchematic, String> {
let mut file = std::fs::File::open(path).map_err(|e| e.to_string())?;
Self::load_data(&mut file)
}
/// Builds a schematic from a version-1 Sponge NBT root.
///
/// V1 roots store tile entities under "TileEntities" and carry no
/// "DataVersion" tag, so `data_version` is set to 0 as a sentinel.
pub fn from_nbt_1(nbt: CompoundTag) -> Result<Self, String> {
    Ok(Self {
        data_version: 0,
        metadata: nbt.get_compound_tag("Metadata").map_err(|e| e.to_string())?.clone(),
        // NOTE(review): the i16 -> u16 casts wrap negative values; presumably
        // dimensions are always positive — confirm against the format spec.
        width: nbt.get_i16("Width").map_err(|e| e.to_string())? as u16,
        height: nbt.get_i16("Height").map_err(|e| e.to_string())? as u16,
        length: nbt.get_i16("Length").map_err(|e| e.to_string())? as u16,
        offset: read_offset(nbt.get_i32_vec("Offset").map_err(|e| e.to_string())?)?,
        palette_max: nbt.get_i32("PaletteMax").map_err(|e| e.to_string())?,
        palette: read_palette(nbt.get_compound_tag("Palette").map_err(|e| e.to_string())?),
        block_data: read_blocks(nbt.get_i8_vec("BlockData").map_err(|e| e.to_string())?),
        // A missing "TileEntities" list is treated as "no tile entities".
        block_entities: read_tile_entities(nbt.get_compound_tag_vec("TileEntities").unwrap_or_else(|_| vec![]))?,
        entities: None,
    })
}
/// Builds a schematic from a version-2 Sponge NBT root.
///
/// V2 adds a mandatory "DataVersion" tag and renames the tile-entity list
/// to "BlockEntities"; the block payload layout is otherwise the same as V1.
pub fn from_nbt_2(nbt: CompoundTag) -> Result<Self, String> {
    Ok(Self{
        data_version: nbt.get_i32("DataVersion").map_err(|e| e.to_string())?,
        metadata: nbt.get_compound_tag("Metadata").map_err(|e| e.to_string())?.clone(),
        width: nbt.get_i16("Width").map_err(|e| e.to_string())? as u16,
        height: nbt.get_i16("Height").map_err(|e| e.to_string())? as u16,
        length: nbt.get_i16("Length").map_err(|e| e.to_string())? as u16,
        offset: read_offset(nbt.get_i32_vec("Offset").map_err(|e| e.to_string())?)?,
        palette_max: nbt.get_i32("PaletteMax").map_err(|e| e.to_string())?,
        palette: read_palette(nbt.get_compound_tag("Palette").map_err(|e| e.to_string())?),
        block_data: read_blocks(nbt.get_i8_vec("BlockData").map_err(|e| e.to_string())?),
        // A missing "BlockEntities" list is treated as "no block entities".
        block_entities: read_tile_entities(nbt.get_compound_tag_vec("BlockEntities").unwrap_or_else(|_| vec![]))?,
        entities: None,
    })
}
/// Builds a schematic from a version-3 Sponge NBT body.
///
/// The caller passes the nested "Schematic" compound (V3 wraps everything
/// under that key). V3 moves the palette, block array (now keyed "Data")
/// and block entities into a "Blocks" sub-compound, and drops "PaletteMax",
/// which is therefore recomputed from the palette here.
pub fn from_nbt_3(nbt: &CompoundTag) -> Result<Self, String> {
    let blocks = nbt.get_compound_tag("Blocks").map_err(|e| e.to_string())?;
    Ok(Self{
        data_version: nbt.get_i32("DataVersion").map_err(|e| e.to_string())?,
        metadata: nbt.get_compound_tag("Metadata").map_err(|e| e.to_string())?.clone(),
        width: nbt.get_i16("Width").map_err(|e| e.to_string())? as u16,
        height: nbt.get_i16("Height").map_err(|e| e.to_string())? as u16,
        length: nbt.get_i16("Length").map_err(|e| e.to_string())? as u16,
        offset: read_offset(nbt.get_i32_vec("Offset").map_err(|e| e.to_string())?)?,
        // V3 has no PaletteMax tag; derive it from the palette ids.
        palette_max: compute_palette_max(blocks.get_compound_tag("Palette").map_err(|e| e.to_string())?),
        palette: read_palette(blocks.get_compound_tag("Palette").map_err(|e| e.to_string())?),
        block_data: read_blocks(blocks.get_i8_vec("Data").map_err(|e| e.to_string())?),
        block_entities: read_tile_entities(blocks.get_compound_tag_vec("BlockEntities").unwrap_or_else(|_| vec![]))?,
        entities: None,
    })
}
}
/// Converts a raw compound-tag list ("TileEntities"/"BlockEntities") into
/// typed `BlockEntity` values, failing on the first malformed entry.
fn read_tile_entities(tag: Vec<&CompoundTag>) -> Result<Vec<BlockEntity>, String> {
    tag.into_iter()
        .map(|t| {
            Ok(BlockEntity {
                id: t.get_str("Id").map_err(|e| e.to_string())?.to_string(),
                pos: read_offset(t.get("Pos").map_err(|e| e.to_string())?)?,
            })
        })
        .collect()
}
/// Converts an offset/position tag into a fixed `[x, y, z]` triple;
/// any length other than exactly three is rejected.
#[inline]
fn read_offset(offset: &Vec<i32>) -> Result<[i32; 3], String> {
    offset
        .as_slice()
        .try_into()
        .map_err(|_| "Invalid schematic: read_offset wrong length".to_string())
}
/// Extracts the block palette as name -> id, keeping only `Int` tags and
/// silently ignoring entries of any other tag type.
#[inline]
fn read_palette(p: &CompoundTag) -> HashMap<String, i32> {
    p.iter()
        .filter_map(|(key, value)| match value {
            Tag::Int(n) => Some((key.clone(), *n)),
            _ => None,
        })
        .collect()
}
/// Returns the largest palette id present, or 0 for a palette with no
/// `Int` entries (used by V3, which dropped the "PaletteMax" tag).
#[inline]
fn compute_palette_max(palette: &CompoundTag) -> i32 {
    let mut max: Option<i32> = None;
    for (_, tag) in palette.iter() {
        if let Tag::Int(id) = tag {
            max = Some(match max {
                Some(m) => m.max(*id),
                None => *id,
            });
        }
    }
    max.unwrap_or(0)
}
/// Decodes the varint-packed block array into per-block palette indices.
/// Thin wrapper kept for naming symmetry with `read_palette`/`read_offset`.
#[inline]
fn read_blocks(blockdata: &Vec<i8>) -> Vec<i32> {
    read_varint_array(blockdata)
}
#[inline]
pub fn read_varint_array(read: &Vec<i8>) -> Vec<i32> {
let mut data = Vec::new();
let mut value: i32 = 0;

View File

@ -1,16 +0,0 @@
[package]
name = "schemsearch-java"
version = "0.1.0"
edition = "2021"
license = "AGPL-3.0-or-later"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[lib]
crate_type = ["cdylib"]
[dependencies]
jni = "0.21.0"
schemsearch-lib = { path = "../schemsearch-lib" }
schemsearch-files = { path = "../schemsearch-files" }

View File

@ -1,54 +0,0 @@
/*
* Copyright (C) 2023 Chaoscaot
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use std::path::PathBuf;
use jni::JNIEnv;
use jni::objects::{JClass, JString};
use jni::sys::jstring;
use schemsearch_files::Schematic;
use schemsearch_lib::{search, SearchBehavior};
/// Legacy JNI entry point (file since removed): searches `pattern_path`
/// inside `schematic_path` with fixed behavior flags and returns matches as
/// one Java string of "x, y, z, percent;" records.
#[no_mangle]
#[allow(unused_variables)]
pub extern "system" fn Java_SchemSearch_search<'local>(mut env: JNIEnv<'local>,
                                                       class: JClass<'local>,
                                                       schematic_path: JString<'local>,
                                                       pattern_path: JString<'local>) -> jstring {
    let schematic_path: String = env.get_string(&schematic_path).expect("Couldn't get java string!").into();
    let pattern_path: String = env.get_string(&pattern_path).expect("Couldn't get java string!").into();
    // NOTE(review): unwrap() aborts on unreadable/invalid files — confirm the
    // Java caller guarantees both paths exist and parse.
    let schematic = Schematic::load(&PathBuf::from(&schematic_path)).unwrap();
    let pattern = Schematic::load(&PathBuf::from(&pattern_path)).unwrap();
    let matches = search(schematic, &pattern, SearchBehavior {
        ignore_block_data: true,
        ignore_block_entities: true,
        ignore_entities: true,
        ignore_air: false,
        air_as_any: false,
        threshold: 0.0,
    });
    let mut result = String::new();
    for (x, y, z, p) in matches {
        result.push_str(&format!("{}, {}, {}, {};", x, y, z, p));
    }
    // Drops the trailing ';'.
    // NOTE(review): panics when `matches` is empty (remove on an empty
    // string) — guard with is_empty() if zero matches can occur.
    result.remove(result.len() - 1);
    let output = env.new_string(result).expect("Couldn't create java string!");
    output.into_raw()
}

9
schemsearch-lib/Cargo.toml Normal file → Executable file
View File

@ -1,12 +1,15 @@
[package]
name = "schemsearch-lib"
version = "0.1.0"
version = "0.1.7"
edition = "2021"
license = "AGPL-3.0-or-later"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
hematite-nbt = "0.5.2"
serde = "1.0.152"
schemsearch-files = { path = "../schemsearch-files" }
schemsearch-common = { path = "../schemsearch-common" }
schemsearch-ocl-matcher = { path = "../schemsearch-ocl-matcher" }
named-binary-tag = "0.6"
libmath = "0.2.1"
lazy_static = "1.4.0"

8
schemsearch-lib/src/.idea/modules.xml generated Normal file
View File

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/src.iml" filepath="$PROJECT_DIR$/.idea/src.iml" />
</modules>
</component>
</project>

8
schemsearch-lib/src/.idea/src.iml generated Normal file
View File

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="CPP_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$" />
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>

6
schemsearch-lib/src/.idea/vcs.xml generated Normal file
View File

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$/../.." vcs="Git" />
</component>
</project>

63
schemsearch-lib/src/.idea/workspace.xml generated Normal file
View File

@ -0,0 +1,63 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="CMakeSettings">
<configurations>
<configuration PROFILE_NAME="Debug" ENABLED="true" CONFIG_NAME="Debug" />
</configurations>
</component>
<component name="ChangeListManager">
<list default="true" id="352451bc-b368-403e-b1be-bfdcb573471f" name="Changes" comment="">
<change afterPath="$PROJECT_DIR$/../../schemsearch-py/Cargo.toml" afterDir="false" />
<change afterPath="$PROJECT_DIR$/../../schemsearch-py/pyproject.toml" afterDir="false" />
<change afterPath="$PROJECT_DIR$/../../schemsearch-py/src/lib.rs" afterDir="false" />
<change beforePath="$PROJECT_DIR$/../../Cargo.toml" beforeDir="false" afterPath="$PROJECT_DIR$/../../Cargo.toml" afterDir="false" />
<change beforePath="$PROJECT_DIR$/../../SchemSearch.java" beforeDir="false" afterPath="$PROJECT_DIR$/../../SchemSearch.java" afterDir="false" />
</list>
<option name="SHOW_DIALOG" value="false" />
<option name="HIGHLIGHT_CONFLICTS" value="true" />
<option name="HIGHLIGHT_NON_ACTIVE_CHANGELIST" value="false" />
<option name="LAST_RESOLUTION" value="IGNORE" />
</component>
<component name="ClangdSettings">
<option name="formatViaClangd" value="false" />
</component>
<component name="Git.Settings">
<option name="RECENT_GIT_ROOT_PATH" value="$PROJECT_DIR$/../.." />
</component>
<component name="ProjectColorInfo"><![CDATA[{
"customColor": "",
"associatedIndex": 8
}]]></component>
<component name="ProjectId" id="2gFqSldpa6G5CPOKD9Sjp2GUcRW" />
<component name="ProjectViewState">
<option name="hideEmptyMiddlePackages" value="true" />
<option name="showLibraryContents" value="true" />
</component>
<component name="PropertiesComponent"><![CDATA[{
"keyToString": {
"RunOnceActivity.ShowReadmeOnStart": "true",
"RunOnceActivity.cidr.known.project.marker": "true",
"RunOnceActivity.readMode.enableVisualFormatting": "true",
"cf.first.check.clang-format": "false",
"cidr.known.project.marker": "true",
"git-widget-placeholder": "master",
"nodejs_package_manager_path": "npm",
"vue.rearranger.settings.migration": "true"
}
}]]></component>
<component name="SpellCheckerSettings" RuntimeDictionaries="0" Folders="0" CustomDictionaries="0" DefaultDictionary="application-level" UseSingleDictionary="true" transferred="true" />
<component name="TaskManager">
<task active="true" id="Default" summary="Default task">
<changelist id="352451bc-b368-403e-b1be-bfdcb573471f" name="Changes" comment="" />
<created>1715303674752</created>
<option name="number" value="Default" />
<option name="presentableId" value="Default" />
<updated>1715303674752</updated>
<workItem from="1715303675811" duration="8000" />
</task>
<servers />
</component>
<component name="TypeScriptGeneratedFilesManager">
<option name="version" value="3" />
</component>
</project>

163
schemsearch-lib/src/blocks.txt Executable file
View File

@ -0,0 +1,163 @@
oak_sign
oak_wall_sign
oak_hanging_sign
oak_wall_hanging_sign
birch_sign
birch_wall_sign
birch_hanging_sign
birch_wall_hanging_sign
spruce_sign
spruce_wall_sign
spruce_hanging_sign
spruce_wall_hanging_sign
jungle_sign
jungle_wall_sign
jungle_hanging_sign
jungle_wall_hanging_sign
dark_oak_sign
dark_oak_wall_sign
dark_oak_hanging_sign
dark_oak_wall_hanging_sign
acacia_sign
acacia_wall_sign
acacia_hanging_sign
acacia_wall_hanging_sign
mangrove_sign
mangrove_wall_sign
mangrove_hanging_sign
mangrove_wall_hanging_sign
cherry_sign
cherry_wall_sign
cherry_hanging_sign
cherry_wall_hanging_sign
bamboo_sign
bamboo_wall_sign
bamboo_hanging_sign
bamboo_wall_hanging_sign
warped_sign
warped_wall_sign
warped_hanging_sign
warped_wall_hanging_sign
crimson_sign
crimson_wall_sign
crimson_hanging_sign
crimson_wall_hanging_sign
suspicious_gravel
suspicious_sand
white_banner
light_gray_banner
gray_banner
black_banner
brown_banner
red_banner
orange_banner
yellow_banner
lime_banner
green_banner
cyan_banner
light_blue_banner
blue_banner
purple_banner
magenta_banner
pink_banner
white_wall_banner
light_gray_wall_banner
gray_wall_banner
black_wall_banner
brown_wall_banner
red_wall_banner
orange_wall_banner
yellow_wall_banner
lime_wall_banner
green_wall_banner
cyan_wall_banner
light_blue_wall_banner
blue_wall_banner
purple_wall_banner
magenta_wall_banner
pink_wall_banner
white_bed
light_gray_bed
gray_bed
black_bed
brown_bed
red_bed
orange_bed
yellow_bed
lime_bed
green_bed
cyan_bed
light_blue_bed
blue_bed
purple_bed
magenta_bed
pink_bed
shulker_box
white_shulker_box
light_gray_shulker_box
gray_shulker_box
black_shulker_box
brown_shulker_box
red_shulker_box
orange_shulker_box
yellow_shulker_box
lime_shulker_box
green_shulker_box
cyan_shulker_box
light_blue_shulker_box
blue_shulker_box
purple_shulker_box
magenta_shulker_box
pink_shulker_box
furnace
blast_furnace
smoker
chest
trapped_chest
ender_chest
enchanting_table
barrel
lectern
jukebox
bell
brewing_stand
bee_nest
beehive
decorated_pot
beacon
conduit
campfire
soul_campfire
redstone_comparator
hopper
dispenser
dropper
moving_piston
daylight_detector
sculk_sensor
calibrated_sculk_sensor
sculk_catalyst
sculk_shrieker
player_head
player_wall_head
wither_skeleton_skull
wither_skeleton_wall_skull
zombie_head
zombie_wall_head
skeleton_skull
skeleton_wall_skull
creeper_head
creeper_wall_head
piglin_head
piglin_wall_head
dragon_head
dragon_wall_head
chiseled_bookshelf
command_block
chain_command_block
repeating_command_block
structure_block
jigsaw_block
end_portal
end_gateway
monster_spawner

161
schemsearch-lib/src/lib.rs Normal file → Executable file
View File

@ -16,87 +16,10 @@
*/
pub mod pattern_mapper;
pub mod search;
pub mod nbt_search;
use pattern_mapper::match_palette;
use schemsearch_files::Schematic;
use crate::pattern_mapper::match_palette_adapt;
#[derive(Debug, Clone, Copy)]
pub struct SearchBehavior {
pub ignore_block_data: bool,
pub ignore_block_entities: bool,
pub ignore_air: bool,
pub air_as_any: bool,
pub ignore_entities: bool,
pub threshold: f32,
}
pub fn search(
schem: Schematic,
pattern_schem: &Schematic,
search_behavior: SearchBehavior,
) -> Vec<(u16, u16, u16, f32)> {
if schem.width < pattern_schem.width || schem.height < pattern_schem.height || schem.length < pattern_schem.length {
return vec![];
}
if pattern_schem.palette.len() > schem.palette.len() {
return vec![];
}
let pattern_schem = match_palette(&schem, &pattern_schem, search_behavior.ignore_block_data);
let mut matches: Vec<(u16, u16, u16, f32)> = Vec::new();
let pattern_data = pattern_schem.block_data.as_slice();
let schem_data = if search_behavior.ignore_block_data {
match_palette_adapt(&schem, &pattern_schem.palette, search_behavior.ignore_block_data)
} else {
schem.block_data
};
let schem_data = schem_data.as_slice();
let air_id = if search_behavior.ignore_air || search_behavior.air_as_any { pattern_schem.palette.get("minecraft:air").unwrap_or(&-1) } else { &-1};
let pattern_blocks = (pattern_schem.width * pattern_schem.height * pattern_schem.length) as f32;
let pattern_width = pattern_schem.width as usize;
let pattern_height = pattern_schem.height as usize;
let pattern_length = pattern_schem.length as usize;
let schem_width = schem.width as usize;
let schem_height = schem.height as usize;
let schem_length = schem.length as usize;
for y in 0..=schem_height - pattern_height {
for z in 0..=schem_length - pattern_length {
for x in 0..=schem_width - pattern_width {
let mut matching = 0;
for j in 0..pattern_height {
for k in 0..pattern_length {
for i in 0..pattern_width {
let index = (x + i) + schem_width * ((z + k) + (y + j) * schem_length);
let pattern_index = i + pattern_width * (k + j * pattern_length);
let data = unsafe {schem_data.get_unchecked(index) };
let pattern_data = unsafe { pattern_data.get_unchecked(pattern_index) };
if *data == *pattern_data || (search_behavior.ignore_air && *data == *air_id) || (search_behavior.air_as_any && *pattern_data == *air_id) {
matching += 1;
}
}
}
}
let matching_percent = matching as f32 / pattern_blocks;
if matching_percent >= search_behavior.threshold {
matches.push((x as u16, y as u16, z as u16, matching_percent));
}
}
}
}
return matches;
}
use schemsearch_common::SearchBehavior;
#[inline]
pub fn normalize_data(data: &str, ignore_data: bool) -> &str {
@ -107,42 +30,34 @@ pub fn normalize_data(data: &str, ignore_data: bool) -> &str {
}
}
pub fn parse_schematic(data: &Vec<u8>) -> Schematic {
if data[0] == 0x1f && data[1] == 0x8b {
// gzip
nbt::from_gzip_reader(data.as_slice()).unwrap()
} else {
// uncompressed
nbt::from_reader(data.as_slice()).unwrap()
}
}
#[allow(unused_imports)]
#[cfg(test)]
mod tests {
use std::path::{Path, PathBuf};
use schemsearch_files::Schematic;
use crate::pattern_mapper::strip_data;
use schemsearch_files::SpongeSchematic;
use crate::pattern_mapper::{match_palette, strip_data};
use crate::search::search;
use super::*;
#[test]
fn read_schematic() {
let schematic = Schematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
let schematic = SpongeSchematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
assert_eq!(schematic.width as usize * schematic.height as usize * schematic.length as usize, schematic.block_data.len());
assert_eq!(schematic.palette_max, schematic.palette.len() as i32);
}
#[test]
fn test_parse_function() {
let file = std::fs::File::open("../tests/simple.schem").expect("Failed to open file");
let schematic: Schematic = parse_schematic(&std::io::Read::bytes(file).map(|b| b.unwrap()).collect());
let schematic = SpongeSchematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
assert_eq!(schematic.width as usize * schematic.height as usize * schematic.length as usize, schematic.block_data.len());
assert_eq!(schematic.palette_max, schematic.palette.len() as i32);
}
#[test]
fn test_strip_schem() {
let schematic = Schematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
let schematic = SpongeSchematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
let stripped = strip_data(&schematic);
assert_eq!(stripped.palette.keys().any(|k| k.contains('[')), false);
@ -150,69 +65,49 @@ mod tests {
#[test]
fn test_match_palette() {
let schematic = Schematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
let endstone = Schematic::load(&PathBuf::from("../tests/endstone.schem")).unwrap();
let schematic = SpongeSchematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
let endstone = SpongeSchematic::load(&PathBuf::from("../tests/endstone.schem")).unwrap();
let _ = match_palette(&schematic, &endstone, true);
}
#[test]
fn test_match_palette_ignore_data() {
let schematic = Schematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
let endstone = Schematic::load(&PathBuf::from("../tests/endstone.schem")).unwrap();
let schematic = SpongeSchematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
let endstone = SpongeSchematic::load(&PathBuf::from("../tests/endstone.schem")).unwrap();
let _ = match_palette(&schematic, &endstone, false);
}
#[test]
pub fn test_big_search() {
let schematic = Schematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
let endstone = Schematic::load(&PathBuf::from("../tests/endstone.schem")).unwrap();
let schematic = SpongeSchematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
let endstone = SpongeSchematic::load(&PathBuf::from("../tests/endstone.schem")).unwrap();
let _ = search(schematic, &endstone, SearchBehavior {
ignore_block_data: true,
ignore_block_entities: true,
ignore_entities: true,
ignore_air: false,
air_as_any: false,
threshold: 0.9
});
let _ = search(schematic, &endstone, SearchBehavior::default());
}
#[test]
pub fn test_search() {
let schematic = Schematic::load(&PathBuf::from("../tests/Random.schem")).unwrap();
let pattern = Schematic::load(&PathBuf::from("../tests/Pattern.schem")).unwrap();
let schematic = SpongeSchematic::load(&PathBuf::from("../tests/Random.schem")).unwrap();
let pattern = SpongeSchematic::load(&PathBuf::from("../tests/Pattern.schem")).unwrap();
let matches = search(schematic, &pattern, SearchBehavior {
ignore_block_data: true,
ignore_block_entities: true,
ignore_entities: true,
ignore_air: false,
air_as_any: false,
threshold: 0.9
});
let matches = search(schematic, &pattern, SearchBehavior::default());
println!("{:?}", matches);
assert_eq!(matches.len(), 1);
assert_eq!(matches[0], (1, 0, 3, 1.0));
assert_eq!(matches[0].x, 1);
assert_eq!(matches[0].y, 0);
assert_eq!(matches[0].z, 3);
assert_eq!(matches[0].percent, 1.0);
}
#[test]
pub fn test_search_ws() {
let schematic = Schematic::load(&PathBuf::from("../tests/warships/GreyFly-by-Bosslar.schem")).unwrap();
let pattern = Schematic::load(&PathBuf::from("../tests/gray_castle_complex.schem")).unwrap();
let schematic = SpongeSchematic::load(&PathBuf::from("../tests/warships/GreyFly-by-Bosslar.schem")).unwrap();
let pattern = SpongeSchematic::load(&PathBuf::from("../tests/gray_castle_complex.schem")).unwrap();
let matches = search(schematic, &pattern, SearchBehavior {
ignore_block_data: false,
ignore_block_entities: false,
ignore_entities: false,
ignore_air: false,
air_as_any: false,
threshold: 0.9
});
let matches = search(schematic, &pattern, SearchBehavior::default());
println!("{:?}", matches);
assert_eq!(matches.len(), 1);
}
}

110
schemsearch-lib/src/nbt_search.rs Executable file
View File

@ -0,0 +1,110 @@
use std::borrow::ToOwned;
use std::collections::HashSet;
use std::iter::Iterator;
use lazy_static::lazy_static;
use schemsearch_files::SpongeSchematic;
/// Newline-separated list of block ids (without the "minecraft:" namespace)
/// that carry block-entity NBT, bundled into the binary at compile time.
const NBT_BLOCKS: &str = include_str!("blocks.txt");

lazy_static! {
    /// `NBT_BLOCKS` with the "minecraft:" namespace prepended, built once
    /// for O(1) membership tests against palette keys.
    static ref NBT_BLOCKS_SET: HashSet<String> = {
        NBT_BLOCKS.lines().map(|x| format!("minecraft:{}", x)).collect()
    };
}
/// Returns `true` when the schematic contains a block that should carry a
/// block entity (chest, sign, ...) but no block entity exists at that
/// block's position — i.e. its NBT data has been stripped or corrupted.
pub fn has_invalid_nbt(schem: SpongeSchematic) -> bool {
    // Fast path: an NBT-carrying block is named in the palette but the
    // schematic has no block entities at all.
    // NOTE(review): this also fires for palette entries never used in
    // block_data — confirm that over-reporting is acceptable here.
    if schem.block_entities.is_empty() && schem.palette.keys().any(|v| NBT_BLOCKS_SET.contains(v)) {
        return true;
    }
    // Palette ids of every block type that requires a block entity.
    let nbt_blocks = schem
        .palette
        .iter()
        .filter(|(k, _)| NBT_BLOCKS_SET.contains(k.as_str()))
        .map(|(_, v)| *v)
        .collect::<HashSet<i32>>();
    for (i, block) in schem.block_data.iter().enumerate() {
        if nbt_blocks.contains(block) {
            // Sponge layout: i = x + z * Width + y * Width * Length
            let x = i % schem.width as usize;
            let z = (i / schem.width as usize) % schem.length as usize;
            let y = i / (schem.width as usize * schem.length as usize);
            // Invalid if NO block entity sits at this block's position.
            // Bug fix: the previous `any(|e| !e.pos.eq(..))` was true whenever
            // ANY entity was elsewhere, wrongly flagging every schematic with
            // two or more block entities.
            if !schem
                .block_entities
                .iter()
                .any(|e| e.pos == [x as i32, y as i32, z as i32])
            {
                return true;
            }
        }
    }
    false
}
#[allow(unused_imports)]
#[cfg(test)]
mod tests {
    use nbt::CompoundTag;
    use schemsearch_files::{BlockEntity, SpongeSchematic};
    use super::*;

    /// A palette naming an NBT-carrying block (chest) with an empty
    /// block-entity list must be flagged invalid via the fast path.
    #[test]
    fn test_has_invalid_nbt() {
        let schem = SpongeSchematic {
            data_version: 1,
            metadata: CompoundTag::new(),
            width: 0,
            height: 0,
            length: 0,
            offset: [0, 0, 0],
            palette_max: 1,
            palette: vec![("minecraft:chest".to_owned(), 1)].into_iter().collect(),
            block_data: vec![1],
            block_entities: vec![],
            entities: None,
        };
        assert_eq!(has_invalid_nbt(schem), true);
    }

    /// A chest block whose block entity sits at the same position (0,0,0)
    /// carries valid NBT.
    #[test]
    fn test_has_invalid_nbt_2() {
        let schem = SpongeSchematic {
            data_version: 1,
            metadata: CompoundTag::new(),
            width: 1,
            height: 1,
            length: 1,
            offset: [0, 0, 0],
            palette_max: 1,
            palette: vec![("minecraft:chest".to_owned(), 1)].into_iter().collect(),
            block_data: vec![1],
            block_entities: vec![
                BlockEntity {
                    id: "minecraft:chest".to_owned(),
                    pos: [0, 0, 0],
                }
            ],
            entities: None,
        };
        assert_eq!(has_invalid_nbt(schem), false);
    }

    /// A chest at (0,0,0) whose only block entity sits at (1,0,0) has its
    /// NBT detached and must be flagged invalid.
    #[test]
    fn test_has_invalid_nbt_3() {
        let schem = SpongeSchematic {
            data_version: 1,
            metadata: CompoundTag::new(),
            width: 2,
            height: 1,
            length: 1,
            offset: [0, 0, 0],
            palette_max: 1,
            palette: vec![("minecraft:chest".to_owned(), 1), ("minecraft:stone".to_owned(), 2)].into_iter().collect(),
            block_data: vec![1, 2],
            block_entities: vec![
                BlockEntity {
                    id: "minecraft:chest".to_owned(),
                    pos: [1, 0, 0],
                }
            ],
            entities: None,
        };
        assert_eq!(has_invalid_nbt(schem), true);
    }
}

View File

@ -15,11 +15,12 @@
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use nbt::Map;
use schemsearch_files::Schematic;
use std::collections::HashMap;
use nbt::CompoundTag;
use schemsearch_files::SpongeSchematic;
use crate::normalize_data;
fn create_reverse_palette(schem: &Schematic) -> Vec<&str> {
fn create_reverse_palette(schem: &SpongeSchematic) -> Vec<&str> {
let mut reverse_palette = Vec::with_capacity(schem.palette_max as usize);
(0..schem.palette_max).for_each(|_| reverse_palette.push(""));
for (key, value) in schem.palette.iter() {
@ -28,15 +29,15 @@ fn create_reverse_palette(schem: &Schematic) -> Vec<&str> {
reverse_palette
}
pub fn strip_data(schem: &Schematic) -> Schematic {
pub fn strip_data(schem: &SpongeSchematic) -> SpongeSchematic {
let mut data: Vec<i32> = Vec::new();
let mut palette: Map<String, i32> = Map::new();
let mut palette: HashMap<String, i32> = HashMap::new();
let mut palette_max: i32 = 0;
let reverse_palette = create_reverse_palette(schem);
for block in schem.block_data.iter() {
let block_name = reverse_palette[*block as usize].clone();
let block_name = reverse_palette[*block as usize];
let block_name = block_name.split('[').next().unwrap().to_string();
let entry = palette.entry(block_name).or_insert_with(|| {
@ -47,9 +48,8 @@ pub fn strip_data(schem: &Schematic) -> Schematic {
data.push(*entry);
}
Schematic {
version: schem.version,
data_version: schem.data_version,
SpongeSchematic {
data_version: 1,
palette,
palette_max,
block_data: data,
@ -57,17 +57,17 @@ pub fn strip_data(schem: &Schematic) -> Schematic {
height: schem.height,
length: schem.length,
width: schem.width,
metadata: schem.metadata.clone(),
offset: schem.offset.clone(),
metadata: CompoundTag::new(),
offset: [0; 3],
entities: None,
}
}
pub fn match_palette_adapt(schem: &Schematic, matching_palette: &Map<String, i32>, ignore_data: bool) -> Vec<i32> {
let mut data: Vec<i32> = Vec::new();
pub fn match_palette_adapt(schem: &SpongeSchematic, matching_palette: &HashMap<String, i32>, ignore_data: bool) -> Vec<i32> {
let mut data = Vec::with_capacity(schem.block_data.len());
let reverse_palette = create_reverse_palette(schem);
for x in &schem.block_data {
for x in schem.block_data.as_slice().iter() {
let blockname = reverse_palette[*x as usize];
let blockname = if ignore_data { normalize_data(blockname, ignore_data) } else { blockname };
let block_id = match matching_palette.get(&*blockname) {
@ -81,10 +81,10 @@ pub fn match_palette_adapt(schem: &Schematic, matching_palette: &Map<String, i32
}
pub fn match_palette(
schem: &Schematic,
pattern: &Schematic,
schem: &SpongeSchematic,
pattern: &SpongeSchematic,
ignore_data: bool,
) -> Schematic {
) -> SpongeSchematic {
if ignore_data {
match_palette_internal(&strip_data(schem), &strip_data(pattern), ignore_data)
} else {
@ -93,24 +93,23 @@ pub fn match_palette(
}
fn match_palette_internal(
schem: &Schematic,
pattern: &Schematic,
schem: &SpongeSchematic,
pattern: &SpongeSchematic,
ignore_data: bool,
) -> Schematic {
) -> SpongeSchematic {
let data_pattern: Vec<i32> = match_palette_adapt(&pattern, &schem.palette, ignore_data);
Schematic {
version: pattern.version.clone(),
data_version: pattern.data_version.clone(),
SpongeSchematic {
data_version: 0,
palette: schem.palette.clone(),
palette_max: schem.palette_max,
block_data: data_pattern,
block_entities: pattern.block_entities.clone(),
height: pattern.height.clone(),
length: pattern.length.clone(),
width: pattern.width.clone(),
metadata: pattern.metadata.clone(),
offset: pattern.offset.clone(),
height: pattern.height,
length: pattern.length,
width: pattern.width,
metadata: CompoundTag::new(),
offset: [0; 3],
entities: None,
}
}

119
schemsearch-lib/src/search.rs Executable file
View File

@ -0,0 +1,119 @@
use crate::pattern_mapper::{match_palette, match_palette_adapt};
use math::round::ceil;
use schemsearch_common::time;
use schemsearch_common::{Match, SearchBehavior};
use schemsearch_files::SpongeSchematic;
use schemsearch_ocl_matcher::ocl_search;
/// Brute-force scan of `schem` for placements of `pattern_schem`.
///
/// Returns one `Match` (origin plus match percentage) for every offset at
/// which the number of mismatching blocks stays below the threshold-derived
/// budget. Dispatches to the OpenCL path when `search_behavior.opencl` is
/// set; otherwise runs the CPU triple loop below.
pub fn search(
    schem: SpongeSchematic,
    pattern_schem: &SpongeSchematic,
    search_behavior: SearchBehavior,
) -> Vec<Match> {
    // The pattern cannot fit into a smaller volume — nothing to find.
    if schem.width < pattern_schem.width
        || schem.height < pattern_schem.height
        || schem.length < pattern_schem.length
    {
        return Vec::new();
    }
    // A pattern using more distinct block types than the haystack can never
    // fully match either.
    if pattern_schem.palette.len() > schem.palette.len() {
        return Vec::new();
    }
    // Re-express the pattern in the haystack's palette ids (optionally
    // stripping block-state data) so raw i32 comparison is meaningful.
    let pattern_schem = time!(match_palette, {
        match_palette(&schem, &pattern_schem, search_behavior.ignore_block_data)
    });
    let mut matches: Vec<Match> = Vec::with_capacity(4);
    // When block-state data is ignored, the haystack ids must be remapped
    // with the same normalization; otherwise its data can be used verbatim.
    let schem_data = if search_behavior.ignore_block_data {
        match_palette_adapt(
            &schem,
            &pattern_schem.palette,
            search_behavior.ignore_block_data,
        )
    } else {
        schem.block_data
    };
    // -1 is a sentinel id that matches no real palette entry, so the air
    // checks below are no-ops when neither air flag is enabled.
    let air_id = if search_behavior.ignore_air || search_behavior.air_as_any {
        pattern_schem.palette.get("minecraft:air").unwrap_or(&-1)
    } else {
        &-1
    };
    let pattern_blocks = pattern_schem.block_data.len() as f32;
    let i_pattern_blocks = pattern_blocks as i32;
    let pattern_width = pattern_schem.width as usize;
    let pattern_height = pattern_schem.height as usize;
    let pattern_length = pattern_schem.length as usize;
    let schem_width = schem.width as usize;
    let schem_height = schem.height as usize;
    let schem_length = schem.length as usize;
    if search_behavior.opencl {
        return time!(ocl_search, {
            ocl_search(
                schem_data.as_slice(),
                [schem_width, schem_height, schem_length],
                pattern_schem.block_data.as_slice(),
                [pattern_width, pattern_height, pattern_length],
                *air_id,
                search_behavior,
            )
            .unwrap()
        });
    }
    // Raw pointers let the hot loop below avoid per-access bounds checks.
    let schem_data = schem_data.as_ptr();
    let pattern_data = pattern_schem.block_data.as_ptr();
    // Give up on a candidate once this many blocks disagree; derived from
    // the requested match threshold (threshold 1.0 => budget 0 mismatches).
    let skip_amount = ceil(
        (pattern_blocks * (1.0 - search_behavior.threshold)) as f64,
        0,
    ) as i32;
    // The `..=` upper bounds cannot underflow: the size check at the top of
    // the function guarantees schem dims >= pattern dims.
    for y in 0..=schem_height - pattern_height {
        for z in 0..=schem_length - pattern_length {
            for x in 0..=schem_width - pattern_width {
                let mut not_matching = 0;
                'outer: for j in 0..pattern_height {
                    for k in 0..pattern_length {
                        'inner: for i in 0..pattern_width {
                            // Flat layout: x fastest, then z, then y.
                            let index = (x + i) + schem_width * ((z + k) + (y + j) * schem_length);
                            let pattern_index = i + pattern_width * (k + j * pattern_length);
                            // SAFETY: x+i < schem_width, z+k < schem_length and
                            // y+j < schem_height by the loop bounds, so `index`
                            // is below width*length*height; assumes block_data
                            // holds exactly that many entries — TODO confirm
                            // against the schematic loader.
                            let data = unsafe { *schem_data.add(index) };
                            let pattern_data = unsafe { *pattern_data.add(pattern_index) };
                            // Skip the mismatch count for this cell when an air
                            // flag applies. NOTE(review): the polarity (`!=`
                            // against air) looks surprising for flags named
                            // ignore_air / air_as_any — confirm the intended
                            // semantics; the OpenCL kernel has the same logic
                            // commented out with a TODO.
                            if (search_behavior.ignore_air && data != *air_id)
                                || (search_behavior.air_as_any && pattern_data != *air_id)
                            {
                                continue 'inner;
                            }
                            if data != pattern_data {
                                not_matching += 1;
                                // Over budget — this offset can no longer reach
                                // the threshold, abandon it early.
                                if not_matching >= skip_amount {
                                    break 'outer;
                                }
                            }
                        }
                    }
                }
                if not_matching < skip_amount {
                    matches.push(Match {
                        x: x as u16,
                        y: y as u16,
                        z: z as u16,
                        percent: (i_pattern_blocks - not_matching) as f32 / pattern_blocks,
                    });
                }
            }
        }
    }
    return matches;
}

View File

@ -0,0 +1,9 @@
[package]
name = "schemsearch-ocl-matcher"
version = "0.1.0"
edition = "2021"
[dependencies]
schemsearch-common = { path = "../schemsearch-common" }
ocl = "0.19.7"
libmath = "0.2.1"

View File

@ -0,0 +1,35 @@
// TODO notes from the original author (translated from German):
//  - use a 3D image object instead of a flat buffer
//  - fewer buffer allocations
//  - parallelize over the pattern with local workers?
//  - do the threshold matching on the GPU
//  - fewer workers, more parameters
//  - bake the pattern in as a kernel constant
//
// One work item per candidate origin (x, z, y): each item counts how many
// pattern cells disagree with the schematic at that offset and writes the
// count into result[]. The host compares counts against its skip threshold.
// NOTE: air_id / ignore_air / air_as_any / skipamount are accepted but not
// yet honored (see the commented-out block below), so GPU results can differ
// from the CPU path whenever those flags are enabled.
__kernel void add(__global int *result, __global uint *schem,
                  __constant uint *pattern, const int width, const int height,
                  const int depth, const int p_width, const int p_height,
                  const int p_depth, const uint air_id, const int ignore_air,
                  const int air_as_any, const int skipamount) {
  int x = get_global_id(0);
  int z = get_global_id(1);
  int y = get_global_id(2);

  // Bounds guard: the global work size spans the full schematic volume, so
  // items near the far edges would scan past it and read out of bounds.
  // Returning early leaves the host-side fill value (-1) in result[], which
  // the host already filters out.
  if (x + p_width > width || y + p_height > height || z + p_depth > depth) {
    return;
  }

  int wrong_blocks = 0;
  for (int py = 0; py < p_height; py++) {
    for (int pz = 0; pz < p_depth; pz++) {
      for (int px = 0; px < p_width; px++) {
        // if ((ignore_air && schem_block != air_id) || (air_as_any &&
        // pattern_block != air_id)) {
        //   continue; // TODO: PROBLEM!
        // }
        // Branch-free accumulation: the comparison yields 0 or 1.
        wrong_blocks +=
            schem[(x + px) + width * ((z + pz) + (y + py) * depth)] !=
            pattern[px + p_width * (pz + py * p_depth)];
      }
    }
  }

  // Flat index layout matches the host decode: x fastest, then z, then y.
  int idx = x + z * width + y * width * depth;
  result[idx] = wrong_blocks;
}

View File

@ -0,0 +1,133 @@
use math::round::ceil;
use ocl::SpatialDims::Three;
use ocl::{core, Buffer, CommandQueueProperties, Context, Image, MemFlags, ProQue};
use schemsearch_common::{time, Match, SearchBehavior};
use std::sync::OnceLock;
use std::time;
const KERNEL: &str = include_str!("kernel.cl");
static PRO_QUEU_CELL: OnceLock<ProQue> = OnceLock::new();
/// Reports whether an OpenCL default platform can be resolved on this host.
pub fn ocl_available() -> bool {
    match core::default_platform() {
        Ok(_) => true,
        Err(_) => false,
    }
}
/// Public entry point for the GPU scan: forwards to [`search_ocl`] and
/// flattens any OpenCL error into a plain `String` for the caller.
pub fn ocl_search(
    schem: &[i32],
    schem_size: [usize; 3],
    pattern: &[i32],
    pattern_size: [usize; 3],
    air_id: i32,
    search_behavior: SearchBehavior,
) -> Result<Vec<Match>, String> {
    let result = search_ocl(
        schem,
        schem_size,
        pattern,
        pattern_size,
        air_id,
        search_behavior,
    );
    match result {
        Ok(matches) => Ok(matches),
        Err(e) => Err(e.to_string()),
    }
}
/// OpenCL implementation of the schematic scan.
///
/// Launches one work item per candidate origin over the full schematic
/// volume, reads back a per-origin mismatch count, and converts counts that
/// stay under the threshold budget into `Match` values on the host.
fn search_ocl(
    schem: &[i32],
    schem_size: [usize; 3],
    pattern: &[i32],
    pattern_size: [usize; 3],
    air_id: i32,
    search_behavior: SearchBehavior,
) -> ocl::Result<Vec<Match>> {
    let pattern_width = pattern_size[0];
    let pattern_height = pattern_size[1];
    let pattern_length = pattern_size[2];
    let schem_width = schem_size[0];
    let schem_height = schem_size[1];
    let schem_length = schem_size[2];
    let pattern_blocks = (pattern_width * pattern_height * pattern_length) as f32;
    // Mismatch budget derived from the requested match threshold.
    let skip_amount = ceil(
        (pattern_blocks * (1.0 - search_behavior.threshold)) as f64,
        0,
    ) as i32;
    // The ProQue (context + compiled kernel source + queue) is built once and
    // cached; each call clones it, so the shared instance is never mutated.
    let cell = &PRO_QUEU_CELL;
    let mut pro_que = time!(get_pro_que, {
        cell.get_or_init(|| ProQue::builder().src(KERNEL).build().unwrap())
            .clone()
    });
    // Global work size axes are (x, z, y), matching get_global_id(0..2) in
    // the kernel.
    pro_que.set_dims(Three(schem_width, schem_length, schem_height));
    // Result buffer pre-filled with -1 so untouched slots are recognizable
    // after readback.
    let buffer = time!(create_result_buffer, {
        Buffer::builder()
            .queue(pro_que.queue().clone())
            .flags(MemFlags::new().read_write())
            .fill_val(-1)
            .len(schem.len())
            .build()
    })?;
    let schem_buffer = time!(create_schen_buffer, {
        create_schem_buffer(schem, &pro_que)
    })?;
    let pattern_buffer = time!(create_pattern_buffer, {
        create_schem_buffer(pattern, &pro_que)
    })?;
    // Argument order must mirror the `add` kernel's signature in kernel.cl.
    let kernel = time!(create_kernel, {
        pro_que
            .kernel_builder("add")
            .arg(&buffer)
            .arg(&schem_buffer)
            .arg(&pattern_buffer)
            .arg(schem_width as i32)
            .arg(schem_height as i32)
            .arg(schem_length as i32)
            .arg(pattern_width as i32)
            .arg(pattern_height as i32)
            .arg(pattern_length as i32)
            .arg(air_id)
            .arg(search_behavior.ignore_air as u32)
            .arg(search_behavior.air_as_any as u32)
            .arg(skip_amount)
            .build()
    })?;
    // SAFETY: kernel enqueue is unsafe in ocl because device code is not
    // checked by the compiler; the buffers above are sized to the slices
    // they were created from.
    unsafe {
        time!(run_kernel, { kernel.enq() })?;
    }
    // Blocking readback of the per-origin mismatch counts.
    let mut vec = vec![0; buffer.len()];
    time!(read_buffer, {
        buffer.read(&mut vec).enq()?;
    });
    // Decode the flat index back to (x, y, z) — x fastest, then z, then y —
    // keeping only counts under budget; -1 marks slots still holding the
    // fill value (never written by the kernel).
    Ok(vec
        .into_iter()
        .enumerate()
        .filter(|(_, v)| *v < skip_amount && *v != -1)
        .map(|(i, v)| Match {
            x: (i % schem_width) as u16,
            y: ((i / (schem_width * schem_length)) % schem_height) as u16,
            z: ((i / schem_width) % schem_length) as u16,
            percent: (pattern_blocks - v as f32) / pattern_blocks,
        })
        .collect())
}
/// Uploads a flat block-id slice into a read-only device buffer bound to the
/// shared queue. Despite the parameter name, this is used for both the
/// schematic and the pattern data.
fn create_schem_buffer(pattern: &[i32], pro_que: &ProQue) -> ocl::Result<Buffer<i32>> {
    let builder = Buffer::builder()
        .queue(pro_que.queue().clone())
        .flags(MemFlags::new().read_only())
        .len(pattern.len())
        // Host Memory Map? (open question from the original author)
        .copy_host_slice(pattern);
    builder.build()
}

View File

@ -1,13 +1,13 @@
[package]
name = "schemsearch-sql"
version = "0.1.0"
version = "0.1.3"
edition = "2021"
license = "AGPL-3.0-or-later"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
sqlx = { version = "0.6", features = [ "runtime-async-std-native-tls" , "mysql" ] }
sqlx = { version = "0.7", features = [ "runtime-async-std-native-tls" , "mysql" ] }
schemsearch-lib = { path = "../schemsearch-lib" }
schemsearch-files = { path = "../schemsearch-files" }

View File

@ -16,7 +16,7 @@
*/
use std::sync::Mutex;
use sqlx::{ConnectOptions, Executor, MySql, MySqlPool, Pool, Row};
use sqlx::{Executor, MySql, Pool, Row};
use sqlx::mysql::{MySqlConnectOptions, MySqlPoolOptions};
use crate::filter::SchematicFilter;

View File

@ -1,12 +0,0 @@
[package]
name = "schemsearch_faster"
version = "0.1.0"
edition = "2021"
license = "AGPL-3.0-or-later"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
schemsearch-lib = { path = "../schemsearch-lib" }
schemsearch-files = { path = "../schemsearch-files" }
hematite-nbt = "0.5.2"

View File

@ -1,73 +0,0 @@
/*
* Copyright (C) 2023 Chaoscaot
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
use nbt::Map;
use schemsearch_files::Schematic;
/// Builds one 0/1 layer per palette name: `result[n][i]` is 1 exactly when
/// block `i` of the schematic carries the palette id registered for name `n`
/// (names missing from the schematic palette match nothing via the -1
/// sentinel).
pub fn convert_to_search_space(schem: &Schematic, palette: &Vec<String>) -> Vec<Vec<u8>> {
    let block_data = &schem.block_data;
    palette
        .iter()
        .map(|name| {
            let id = *schem.palette.get(name).unwrap_or(&-1);
            block_data
                .iter()
                .map(|block| if *block == id { 1u8 } else { 0u8 })
                .collect()
        })
        .collect()
}
/// Inverts a name -> id palette map into a Vec where position `id` holds the
/// block name registered under that id. Slots for unused ids stay empty.
pub fn unwrap_palette(palette: &Map<String, i32>) -> Vec<String> {
    // Pre-size with empty strings so ids can be assigned in any order.
    let mut names = vec![String::new(); palette.len()];
    for (key, id) in palette.iter() {
        names[*id as usize] = key.clone();
    }
    names
}
#[allow(unused_imports)]
#[cfg(test)]
mod tests {
    use std::path::{Path, PathBuf};
    use schemsearch_files::Schematic;
    use crate::{convert_to_search_space, unwrap_palette};

    // NOTE: the #[test] attributes below are commented out, so none of these
    // run under `cargo test`; they depend on local fixture files under
    // ../tests and were used for manual experimentation only.

    //#[test]
    pub fn test() {
        // Converts a pattern against its own palette and dumps the one-hot
        // layers with dbg! for eyeballing.
        let schematic = Schematic::load(&PathBuf::from("../tests/Pattern.schem")).unwrap();
        dbg!(convert_to_search_space(&schematic, &unwrap_palette(&schematic.palette)));
    }
    //#[test]
    pub fn test_2() {
        // Converts one schematic using the palette of another, exercising the
        // "name missing from schematic palette" path.
        let schematic = Schematic::load(&PathBuf::from("../tests/Pattern.schem")).unwrap();
        let schematic2 = Schematic::load(&PathBuf::from("../tests/Random.schem")).unwrap();
        println!("{:?}", convert_to_search_space(&schematic2, &unwrap_palette(&schematic.palette)));
    }
    //#[test]
    pub fn test_big() {
        // Larger fixtures; result discarded — presumably a rough timing /
        // smoke check rather than an assertion.
        let schematic = Schematic::load(&PathBuf::from("../tests/endstone.schem")).unwrap();
        let schematic2 = Schematic::load(&PathBuf::from("../tests/simple.schem")).unwrap();
        let _ = convert_to_search_space(&schematic2, &unwrap_palette(&schematic.palette));
    }
}