diff --git a/Cargo.lock b/Cargo.lock
index d27ab382..3a274347 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -181,9 +181,9 @@ dependencies = [
[[package]]
name = "clap"
-version = "3.1.15"
+version = "3.1.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "85a35a599b11c089a7f49105658d089b8f2cf0882993c17daf6de15285c2c35d"
+checksum = "d2dbdf4bdacb33466e854ce889eee8dfd5729abf7ccd7664d0a2d60cd384440b"
dependencies = [
"atty",
"bitflags",
@@ -361,9 +361,9 @@ dependencies = [
[[package]]
name = "ed25519"
-version = "1.4.1"
+version = "1.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3d5c4b5e5959dc2c2b89918d8e2cc40fcdd623cef026ed09d2f0ee05199dc8e4"
+checksum = "1e9c280362032ea4203659fc489832d0204ef09f247a0506f170dafcac08c369"
dependencies = [
"signature",
]
@@ -470,9 +470,9 @@ dependencies = [
[[package]]
name = "indexmap"
-version = "1.8.1"
+version = "1.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0f647032dfaa1f8b6dc29bd3edb7bbef4861b8b8007ebb118d6db284fd59f6ee"
+checksum = "e6012d540c5baa3589337a98ce73408de9b5a25ec9fc2c6fd6be8f0d39e0ca5a"
dependencies = [
"autocfg",
"hashbrown",
@@ -507,9 +507,9 @@ checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4"
[[package]]
name = "itoa"
-version = "1.0.1"
+version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35"
+checksum = "112c678d4050afce233f4f2852bb2eb519230b3cf12f33585275537d7e41578d"
[[package]]
name = "js-sys"
@@ -528,9 +528,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "libc"
-version = "0.2.125"
+version = "0.2.126"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5916d2ae698f6de9bfb891ad7a8d65c09d232dc58cc4ac433c7da3b2fd84bc2b"
+checksum = "349d5a591cd28b49e1d1037471617a32ddcda5731b99419008085f72d5a53836"
[[package]]
name = "log"
@@ -543,9 +543,9 @@ dependencies = [
[[package]]
name = "lz4_flex"
-version = "0.9.2"
+version = "0.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "42c51df9d8d4842336c835df1d85ed447c4813baa237d033d95128bf5552ad8a"
+checksum = "74141c8af4bb8136dafb5705826bdd9dce823021db897c1129191804140ddf84"
dependencies = [
"twox-hash",
]
@@ -592,9 +592,9 @@ checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3"
[[package]]
name = "once_cell"
-version = "1.10.0"
+version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "87f3e037eac156d1775da914196f0f37741a274155e34a0b7e427c35d2a2ecb9"
+checksum = "7709cef83f0c1f58f666e746a08b21e0085f7440fa6a29cc194d68aac97a4225"
[[package]]
name = "oorandom"
@@ -610,9 +610,9 @@ checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5"
[[package]]
name = "os_str_bytes"
-version = "6.0.0"
+version = "6.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8e22443d1643a904602595ba1cd8f7d896afe56d26712531c5ff73a15b2fbf64"
+checksum = "21326818e99cfe6ce1e524c2a805c189a99b5ae555a35d19f9a284b427d86afa"
[[package]]
name = "papergrid"
@@ -681,11 +681,11 @@ dependencies = [
[[package]]
name = "proc-macro2"
-version = "1.0.37"
+version = "1.0.39"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ec757218438d5fda206afc041538b2f6d889286160d649a86a24d37e1235afd1"
+checksum = "c54b25569025b7fc9651de43004ae593a75ad88543b17178aa5e1b9c4f15f56f"
dependencies = [
- "unicode-xid",
+ "unicode-ident",
]
[[package]]
@@ -746,9 +746,9 @@ dependencies = [
[[package]]
name = "rayon"
-version = "1.5.2"
+version = "1.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fd249e82c21598a9a426a4e00dd7adc1d640b22445ec8545feef801d1a74c221"
+checksum = "bd99e5772ead8baa5215278c9b15bf92087709e9c1b2d1f97cdb5a183c933a7d"
dependencies = [
"autocfg",
"crossbeam-deque",
@@ -758,9 +758,9 @@ dependencies = [
[[package]]
name = "rayon-core"
-version = "1.9.2"
+version = "1.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9f51245e1e62e1f1629cbfec37b5793bbabcaeb90f30e94d2ba03564687353e4"
+checksum = "258bcdb5ac6dad48491bb2992db6b7cf74878b0384908af124823d118c99683f"
dependencies = [
"crossbeam-channel",
"crossbeam-deque",
@@ -770,9 +770,9 @@ dependencies = [
[[package]]
name = "regex"
-version = "1.5.5"
+version = "1.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1a11647b6b25ff05a515cb92c365cec08801e83423a235b51e231e1808747286"
+checksum = "d83f127d94bdbcda4c8cc2e50f6f84f4b611f69c902699ca385a39c3a75f9ff1"
dependencies = [
"aho-corasick",
"memchr",
@@ -787,9 +787,9 @@ checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
[[package]]
name = "regex-syntax"
-version = "0.6.25"
+version = "0.6.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b"
+checksum = "49b3de9ec5dc0a3417da371aab17d729997c15010e7fd24ff707773a33bddb64"
[[package]]
name = "rustc_version"
@@ -802,9 +802,9 @@ dependencies = [
[[package]]
name = "ryu"
-version = "1.0.9"
+version = "1.0.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f"
+checksum = "f3f6f92acf49d1b98f7a81226834412ada05458b7364277387724a237f062695"
[[package]]
name = "same-file"
@@ -856,11 +856,11 @@ dependencies = [
[[package]]
name = "serde_json"
-version = "1.0.80"
+version = "1.0.81"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f972498cf015f7c0746cac89ebe1d6ef10c293b94175a243a2d9442c163d9944"
+checksum = "9b7ce2b32a1aed03c558dc61a5cd328f15aff2dbc17daad8fb8af04d2100e15c"
dependencies = [
- "itoa 1.0.1",
+ "itoa 1.0.2",
"ryu",
"serde",
]
@@ -910,13 +910,13 @@ checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601"
[[package]]
name = "syn"
-version = "1.0.92"
+version = "1.0.95"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7ff7c592601f11445996a06f8ad0c27f094a58857c2f89e97974ab9235b92c52"
+checksum = "fbaf6116ab8924f39d52792136fb74fd60a80194cf1b1c6ffa6453eef1c3f942"
dependencies = [
"proc-macro2",
"quote",
- "unicode-xid",
+ "unicode-ident",
]
[[package]]
@@ -1028,9 +1028,9 @@ dependencies = [
[[package]]
name = "twox-hash"
-version = "1.6.2"
+version = "1.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4ee73e6e4924fe940354b8d4d98cad5231175d615cd855b758adc658c0aac6a0"
+checksum = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675"
dependencies = [
"cfg-if",
"static_assertions",
@@ -1042,6 +1042,12 @@ version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987"
+[[package]]
+name = "unicode-ident"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d22af068fba1eb5edcb4aea19d382b2a3deb4c8f9d475c589b6ada9e0fd493ee"
+
[[package]]
name = "unicode-width"
version = "0.1.9"
@@ -1066,7 +1072,7 @@ dependencies = [
[[package]]
name = "vach"
-version = "0.3.5"
+version = "0.4.0"
dependencies = [
"aes-gcm",
"brotli",
@@ -1084,18 +1090,18 @@ name = "vach-benchmarks"
version = "0.1.0"
dependencies = [
"criterion",
+ "rayon",
"vach",
]
[[package]]
name = "vach-cli"
-version = "0.3.6"
+version = "0.3.7"
dependencies = [
"anyhow",
- "clap 3.1.15",
+ "clap 3.1.18",
"indicatif",
"log",
- "num_cpus",
"pretty_env_logger",
"tabled",
"term_size",
diff --git a/Cargo.toml b/Cargo.toml
index 0a15ddfa..cb277e8f 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,6 +1,5 @@
[workspace]
members = ["vach", "vach-cli", "vach-benchmarks"]
-edition = "2021"
[profile.bench]
debug = true
diff --git a/README.md b/README.md
index 796c60e6..0661cdd1 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,5 @@
-
+
vach
@@ -10,17 +10,17 @@
-
+
-
+
- Docs | Repo
+ Docs | Repo
## 👔 The official `vach` crates' repo
-`vach`, pronounced like "puck" but with a "v", is an archiving and resource transmission format. It was built to be secure, contained and protected. It was, in fact, designed by the [SCP](https://en.wikipedia.org/wiki/SCP_Foundation) to keep your anomalous assets compact and secure during transmission. `vach` also has in-built support for multiple compression schemes (LZ4, Snappy and Brolti), [data signing](https://github.com/dalek-cryptography/ed25519-dalek), leaf [bitflags](https://docs.rs/vach/latest/vach/archive/struct.Flags.html), [encryption](https://docs.rs/aes-gcm/latest/aes_gcm/) and some degree of archive customization. Check out the `vach` spec at **[spec.txt](https://github.com/zeskeertwee/virtfs-rs/blob/main/spec/main.txt)**. Any and *all* help will be much appreciated, especially proof reading the docs and code review.
+`vach`, pronounced like "puck" but with a "v", is an archiving and resource transmission format. It was built to be secure, contained and protected. It was, in fact, designed by the [SCP](https://en.wikipedia.org/wiki/SCP_Foundation) to keep your anomalous assets compact and secure during transmission. `vach` also has in-built support for multiple compression schemes (LZ4, Snappy and Brotli), [data signing](https://github.com/dalek-cryptography/ed25519-dalek), leaf [bitflags](https://docs.rs/vach/latest/vach/archive/struct.Flags.html), [encryption](https://docs.rs/aes-gcm/latest/aes_gcm/) and some degree of archive customization. Check out the `vach` spec at **[spec.txt](https://github.com/zeskeertwee/vach/blob/main/spec/main.txt)**. Any and *all* help will be much appreciated, especially proofreading the docs and code review.
---
@@ -36,7 +36,7 @@
### 🤷 Who is what, when where?
- **vach:** An archiving format, like `tar`, `zip` and `rar`. Also the base crate for handling `.vach` files in your application.
-- **vach-cli:** A CLI tool for dealing with `.vach` files.
+- **vach-cli:** A CLI tool for dealing with `.vach` files.
---
@@ -61,8 +61,8 @@ let mut builder = Builder::default();
// Use `Builder::add( reader, ID )` to add data to the write queue
builder.add(File::open("test_data/background.wav")?, "ambient").unwrap();
-builder.add(File::open("test_data/footstep.wav")?, "ftstep").unwrap();
-builder.add(Cursor::new(b"Hello, Cassandra!"), "hello").unwrap();
+builder.add(vec![12, 23, 34, 45, 56, 67, 78, 89, 10], "ftstep").unwrap();
+builder.add(b"Fast-Acting Long-Lasting, *Bathroom Reader*" as &[u8], "hello").unwrap();
// let mut target = File::create("sounds.vach")?;
let mut target = Cursor::new(Vec::new());
@@ -81,12 +81,12 @@ let target = File::open("sounds.vach")?;
let archive = Archive::from_handle(target)?;
let resource: Resource = archive.fetch("ambient")?;
-// By default all resources are flagged as NOT secure
+// By default all resources are flagged as NOT authenticated
println!("{}", Sound::new(&resource.data)?);
-assert!(!resource.secured);
+assert!(!resource.authenticated);
let mut buffer = Vec::new();
-let (flags, content_version, is_secure) = archive.fetch_write("ftstep", &mut buffer)?;
+let (flags, content_version, is_authenticated) = archive.fetch_write("ftstep", &mut buffer)?;
```
##### > Build a signed `.vach` file
@@ -94,7 +94,7 @@ let (flags, content_version, is_secure) = archive.fetch_write("ftstep", &mut buf
```rust
use std::{io::Cursor, fs::File};
use vach::prelude::{Builder, BuilderConfig, Keypair};
-use vach::utils::gen_keypair;
+use vach::crypto_utils::gen_keypair;
let keypair: Keypair = gen_keypair();
let config: BuilderConfig = BuilderConfig::default().keypair(keypair);
@@ -103,7 +103,7 @@ let mut builder: Builder = Builder::default();
// Use different data types under the same builder umbrella, uses dynamic dispatch
let data_1 = vec![12, 23, 45, 56, 67 ,78, 89, 69];
let data_2 = File::open("test_data/footstep.wav").unwrap();
-let data_3 = b"Fast-Acting Long-Lasting, *Bathroom Reader*" as &[u8];
+let data_3 = b"Hello, Cassandra!" as &[u8];
// Use `Builder::add( reader, ID )` to add data to the write queue
builder.add(data_3, "ambient").unwrap();
@@ -120,7 +120,7 @@ As `Keypair`, `SecretKey` and `PublicKey` are reflected from [ed25519_dalek](htt
```rust
use vach::prelude::{Keypair, SecretKey, PublicKey};
-use vach::utils::gen_keypair;
+use vach::crypto_utils::gen_keypair;
// Generate keys
let keypair : Keypair = gen_keypair();
@@ -145,7 +145,7 @@ let keypair : Keypair = Keypair::from_bytes(&keypair_bytes).unwrap();
let mut public_key_bytes: [u8; crate::PUBLIC_KEY_LENGTH] = include_bytes!(PUBLIC_KEY);
// Build the Loader config
-let mut config = HeaderConfig::default().key(PublicKey::from_bytes(&public_key_bytes)?);
+let mut config = ArchiveConfig::default().key(PublicKey::from_bytes(&public_key_bytes)?);
let target = File::open("sounds.vach")?;
let archive = Archive::with_config(target, &config)?;
@@ -153,7 +153,7 @@ let archive = Archive::with_config(target, &config)?;
// Resources are marked as secure (=true) if the signatures match the data
let resource = archive.fetch("ambient")?;
println!("{}", Sound::new(&resource.data)?);
-assert!(resource.secured);
+assert!(resource.authenticated);
```
##### > A quick consolidated example
@@ -180,7 +180,7 @@ builder.add_leaf(Leaf::from_handle(data_3).id("d3").compress(CompressMode::Detec
builder.dump(&mut target, &config)?;
// Load data
-let config = HeaderConfig::default().magic(*MAGIC);
+let config = ArchiveConfig::default().magic(*MAGIC);
let archive = Archive::with_config(target, &config)?;
// Quick assertions
diff --git a/vach-benchmarks/Cargo.toml b/vach-benchmarks/Cargo.toml
index 36d6614e..bd630566 100644
--- a/vach-benchmarks/Cargo.toml
+++ b/vach-benchmarks/Cargo.toml
@@ -7,7 +7,8 @@ publish = false
[dependencies]
criterion = "0.3.5"
-vach = { path = "../vach", features = ["multithreaded", "compression"] }
+vach = { path = "../vach", features = ["compression", "crypto"] }
+rayon = "*"
[[bench]]
name = "benchmark"
diff --git a/vach-benchmarks/benches/main.rs b/vach-benchmarks/benches/main.rs
index 8c11694b..cb91efaa 100644
--- a/vach-benchmarks/benches/main.rs
+++ b/vach-benchmarks/benches/main.rs
@@ -1,8 +1,10 @@
+use std::collections::HashMap;
use std::io;
use criterion::{Criterion, black_box, criterion_group, criterion_main, Throughput};
+use rayon::iter::{ParallelIterator, IntoParallelRefIterator};
use vach::prelude::*;
-use vach::utils::gen_keypair;
+use vach::crypto_utils::gen_keypair;
// Remove io overhead by Sinking data into the void
struct Sink;
@@ -36,7 +38,7 @@ pub fn criterion_benchmark(c: &mut Criterion) {
let mut b_config = BuilderConfig::default().magic(*MAGIC);
b_config.load_keypair(keypair_bytes).unwrap();
- let mut h_config = HeaderConfig::default().magic(*MAGIC);
+ let mut h_config = ArchiveConfig::default().magic(*MAGIC);
h_config.load_public_key(&keypair_bytes[32..]).unwrap();
/* BUILDER BENCHMARKS */
@@ -79,9 +81,9 @@ pub fn criterion_benchmark(c: &mut Criterion) {
{
// Builds an archive source from which to benchmark
let template = Leaf::default()
- .encrypt(true)
+ .encrypt(false)
.sign(false)
- .compress(CompressMode::Always)
+ .compress(CompressMode::Never)
.compression_algo(CompressionAlgorithm::LZ4);
let mut builder = Builder::new().template(template);
@@ -112,7 +114,13 @@ pub fn criterion_benchmark(c: &mut Criterion) {
throughput_group.bench_function("Archive::fetch_batch(---)", |b| {
// Load data
b.iter(|| {
- archive.fetch_batch(["d2", "d1", "d3"].into_iter(), None).unwrap();
+ let resources = ["d2", "d1", "d3"]
+ .as_slice()
+ .par_iter()
+ .map(|id| (id, archive.fetch(&id)))
+ .collect::>();
+
+ criterion::black_box(resources)
});
});
diff --git a/vach-cli/Cargo.toml b/vach-cli/Cargo.toml
index 21fddc3a..4da1de3f 100644
--- a/vach-cli/Cargo.toml
+++ b/vach-cli/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "vach-cli"
-version = "0.3.6"
+version = "0.3.7"
edition = "2021"
authors = [
"Jasper Fortuin ",
@@ -27,7 +27,6 @@ log = "0.4.17"
walkdir = "2.3.2"
pretty_env_logger = "0.4.0"
term_size = "0.3.2"
-num_cpus = "1.13.1"
[dependencies.vach]
path = "../vach"
diff --git a/vach-cli/src/commands/keypair.rs b/vach-cli/src/commands/keypair.rs
index 077cc79d..96e3b5e8 100644
--- a/vach-cli/src/commands/keypair.rs
+++ b/vach-cli/src/commands/keypair.rs
@@ -1,4 +1,4 @@
-use vach::utils::gen_keypair;
+use vach::crypto_utils::gen_keypair;
use crate::utils;
use crate::keys::key_names;
diff --git a/vach-cli/src/commands/list.rs b/vach-cli/src/commands/list.rs
index f3e4281e..e89bb09c 100644
--- a/vach-cli/src/commands/list.rs
+++ b/vach-cli/src/commands/list.rs
@@ -2,7 +2,7 @@ use std::fs::File;
use tabled::{Style, Table, Tabled, Modify, Full, MaxWidth, Alignment, Columns};
use vach::{
- prelude::{HeaderConfig, Archive, Flags},
+ prelude::{ArchiveConfig, Archive, Flags},
archive::{CompressionAlgorithm, RegistryEntry},
};
use indicatif::HumanBytes;
@@ -47,7 +47,7 @@ impl CommandTrait for Evaluator {
};
let file = File::open(archive_path)?;
- let archive = Archive::with_config(file, &HeaderConfig::new(magic, None))?;
+ let archive = Archive::with_config(file, &ArchiveConfig::new(magic, None))?;
if archive.entries().is_empty() {
println!("{}", archive);
diff --git a/vach-cli/src/commands/pack.rs b/vach-cli/src/commands/pack.rs
index d52c6e4f..ee841d72 100644
--- a/vach-cli/src/commands/pack.rs
+++ b/vach-cli/src/commands/pack.rs
@@ -7,7 +7,7 @@ use std::path::PathBuf;
use std::collections::HashSet;
use vach::prelude::*;
-use vach::utils;
+use vach::crypto_utils;
use indicatif::{ProgressBar, ProgressStyle};
use walkdir;
@@ -163,12 +163,12 @@ impl CommandTrait for Evaluator {
let secret_key = match args.value_of(key_names::KEYPAIR) {
Some(path) => {
let file = File::open(path)?;
- Some(utils::read_keypair(file)?.secret)
+ Some(crypto_utils::read_keypair(file)?.secret)
},
None => match args.value_of(key_names::SECRET_KEY) {
Some(path) => {
let file = File::open(path)?;
- Some(utils::read_secret_key(file)?)
+ Some(crypto_utils::read_secret_key(file)?)
},
None => None,
},
@@ -185,7 +185,7 @@ impl CommandTrait for Evaluator {
// If encrypt is true, and no keypair was found: Generate and write a new keypair to a file
if (encrypt || hash) && kp.is_none() {
- let generated = utils::gen_keypair();
+ let generated = crypto_utils::gen_keypair();
let mut file = File::create("keypair.kp")?;
file.write_all(&generated.to_bytes())?;
diff --git a/vach-cli/src/commands/split.rs b/vach-cli/src/commands/split.rs
index fd108a47..a8062d49 100644
--- a/vach-cli/src/commands/split.rs
+++ b/vach-cli/src/commands/split.rs
@@ -1,6 +1,6 @@
use std::fs::File;
-use vach::utils::read_keypair;
+use vach::crypto_utils::read_keypair;
use super::CommandTrait;
use crate::{keys::key_names, utils};
diff --git a/vach-cli/src/commands/unpack.rs b/vach-cli/src/commands/unpack.rs
index 04418e86..343d9b21 100644
--- a/vach-cli/src/commands/unpack.rs
+++ b/vach-cli/src/commands/unpack.rs
@@ -1,13 +1,12 @@
-use std::collections::HashSet;
use std::fs::{self, File};
use std::str::FromStr;
-use std::io::{Read, Seek, self};
+use std::io::{Read, Seek};
use std::path::PathBuf;
use std::time::Instant;
-use vach::archive::RegistryEntry;
-use vach::prelude::{HeaderConfig, Archive, InternalError};
-use vach::utils;
+use vach::prelude::{ArchiveConfig, Archive, InternalError};
+use vach::rayon::iter::{IntoParallelRefIterator, ParallelIterator};
+use vach::crypto_utils;
use indicatif::{ProgressBar, ProgressStyle};
use super::CommandTrait;
@@ -47,12 +46,12 @@ impl CommandTrait for Evaluator {
Err(err) => anyhow::bail!("IOError: {} @ {}", err, path),
};
- Some(utils::read_keypair(file)?.public)
+ Some(crypto_utils::read_keypair(file)?.public)
},
None => match args.value_of(key_names::PUBLIC_KEY) {
Some(path) => {
let file = File::open(path)?;
- Some(utils::read_public_key(file)?)
+ Some(crypto_utils::read_public_key(file)?)
},
None => None,
},
@@ -66,17 +65,17 @@ impl CommandTrait for Evaluator {
Err(err) => anyhow::bail!("IOError: {} @ {}", err, input_path),
};
- // Generate HeaderConfig using given magic and public key
- let header_config = HeaderConfig::new(magic, public_key);
+ // Generate ArchiveConfig using given magic and public key
+ let header_config = ArchiveConfig::new(magic, public_key);
// Parse then extract archive
let archive = match Archive::with_config(input_file, &header_config) {
Ok(archive) => archive,
Err(err) => match err {
- InternalError::NoKeypairError(_) => anyhow::bail!(
+ InternalError::NoKeypairError => anyhow::bail!(
"Please provide a public key or a keypair for use in decryption or signature verification"
),
- InternalError::ValidationError(err) => anyhow::bail!("Unable to validate the archive: {}", err),
+ InternalError::MalformedArchiveSource(_) => anyhow::bail!("Unable to validate the archive: {}", err),
err => anyhow::bail!("Encountered an error: {}", err.to_string()),
},
};
@@ -120,71 +119,43 @@ fn extract_archive(archive: &Archive, target_fo
]),
);
- // Generates window slices from a bigger window based on core count, therefore dispatching work evenly across threads
- let num_cores = num_cpus::get();
- let window_size = if num_cores * 2 > archive.entries().len() {
- 1
- } else {
- num_cores * 2
- };
-
// Vector to allow us to window later via .as_slice()
- let entry_vec = archive.entries().iter().collect::>();
- let entry_windows: Vec<&[(&String, &RegistryEntry)]> = entry_vec.windows(window_size).collect();
-
- // Stores processed values and keeps track of results
- let mut processed_ids = HashSet::new();
- let mut processed_resources;
-
- // Process entries concurrently
- for entry_batch in entry_windows {
- processed_resources = archive.fetch_batch(
- entry_batch
- .iter()
- .filter(|(id, _)| !processed_ids.contains(*id))
- .inspect(|(id, _)| {
- /* Sets message inside the progress bar */
-
- // Prevent column from wrapping around
- let mut msg = (**id).clone();
- if let Some((terminal_width, _)) = term_size::dimensions() {
- // Make sure progress bar never get's longer than terminal size
- if msg.len() + 140 >= terminal_width {
- msg.truncate(terminal_width - 140);
- msg.push_str("...");
- }
- };
-
- pbar.set_message(msg);
- })
- .map(|f| f.0.as_str()),
- Some(num_cores),
- )?;
-
- processed_resources
- .into_iter()
- .try_for_each(|(id, data)| -> anyhow::Result<()> {
- // Process filesystem
- let mut save_path = target_folder.clone();
- save_path.push(&id);
-
- if let Some(parent_dir) = save_path.ancestors().nth(1) {
- fs::create_dir_all(parent_dir)?;
- };
+ let entry_vec = archive.entries().iter().map(|a| (a.0, a.1.offset)).collect::>();
+
+ // ignore the unprofessional match clause
+ match entry_vec.as_slice().par_iter().try_for_each(|(id, offset)| {
+ // Prevent column from wrapping around
+ if let Some((terminal_width, _)) = term_size::dimensions() {
+ let mut msg = id.to_string();
+ // Make sure progress bar never gets longer than terminal size
+ if msg.len() + 140 >= terminal_width {
+ msg.truncate(terminal_width - 140);
+ msg.push_str("...");
+ }
+
+ // Sets the Progress Bar message
+ pbar.set_message(msg.to_string());
+ };
- // Write to file and update process queue
- let resource = data?;
- let mut file = File::create(save_path)?;
- io::copy(&mut resource.data.as_slice(), &mut file)?;
+ // Process filesystem
+ let mut save_path = target_folder.clone();
+ save_path.push(&id);
- // Increment Progress Bar
- let entry = archive.fetch_entry(&id).unwrap();
- pbar.inc(entry.offset);
- processed_ids.insert(id);
+ if let Some(parent_dir) = save_path.ancestors().nth(1) {
+ fs::create_dir_all(parent_dir)?;
+ };
- Ok(())
- })?;
- }
+ // Write to file and update process queue
+ let mut file = File::create(save_path)?;
+ archive.fetch_write(id, &mut file)?;
+
+ // Increment Progress Bar
+ pbar.inc(*offset);
+ Ok(())
+ }) {
+ Ok(it) => it,
+ Err(err) => return Err(err),
+ };
// Finished extracting
pbar.finish_and_clear();
diff --git a/vach-cli/src/commands/verify.rs b/vach-cli/src/commands/verify.rs
index 78483f99..98a018a1 100644
--- a/vach-cli/src/commands/verify.rs
+++ b/vach-cli/src/commands/verify.rs
@@ -1,6 +1,6 @@
use std::fs::File;
-use vach::archive::{Archive, HeaderConfig};
+use vach::archive::{Archive, ArchiveConfig};
use super::CommandTrait;
use crate::keys::key_names;
@@ -24,7 +24,7 @@ impl CommandTrait for Evaluator {
let input_file = File::open(input_path)?;
- if let Err(err) = Archive::with_config(input_file, &HeaderConfig::new(magic, None)) {
+ if let Err(err) = Archive::with_config(input_file, &ArchiveConfig::new(magic, None)) {
anyhow::bail!("Unable to verify the archive source, error: {}", err.to_string())
};
diff --git a/vach/Cargo.toml b/vach/Cargo.toml
index 761689be..693941fe 100644
--- a/vach/Cargo.toml
+++ b/vach/Cargo.toml
@@ -2,7 +2,7 @@
name = "vach"
# NOTE: Make sure spec.txt and vach::VERSION constants are all synced up
-version = "0.3.5"
+version = "0.4.0"
edition = "2021"
authors = [
@@ -38,9 +38,9 @@ rayon = { version = "1.5.2", optional = true }
num_cpus = { version = "1.13.1", optional = true }
[features]
-default = ["builder", "loader"]
+default = ["builder", "archive"]
-loader = []
+archive = []
builder = []
crypto = ["ed25519-dalek", "aes-gcm", "rand"]
diff --git a/vach/README.md b/vach/README.md
index 8cbc4ce8..e6048ec8 100644
--- a/vach/README.md
+++ b/vach/README.md
@@ -1,5 +1,5 @@
-
+
vach
@@ -10,17 +10,17 @@
-
+
-
+
- Docs | Repo
+ Docs | Repo
---
-`vach`, pronounced like "puck" but with a "v", is an archiving and resource transmission format. It was built to be secure, contained and protected. It was, in fact, designed by the [SCP](https://en.wikipedia.org/wiki/SCP_Foundation) to keep your anomalous assets compact and secure during transmission. `vach` also has in-built support for multiple compression schemes (LZ4, Snappy and Brolti), [data signing](https://github.com/dalek-cryptography/ed25519-dalek), leaf [bitflags](https://docs.rs/vach/latest/vach/archive/struct.Flags.html), [encryption](https://docs.rs/aes-gcm/latest/aes_gcm/) and some degree of archive customization. Check out the `vach` spec at **[spec.txt](https://github.com/zeskeertwee/virtfs-rs/blob/main/spec/main.txt)**. Any and *all* help will be much appreciated, especially proof reading the docs and code review.
+`vach`, pronounced like "puck" but with a "v", is an archiving and resource transmission format. It was built to be secure, contained and protected. It was, in fact, designed by the [SCP](https://en.wikipedia.org/wiki/SCP_Foundation) to keep your anomalous assets compact and secure during transmission. A big benefit of `vach` is the fine-grained control it grants its users, as it allows for per-entry independent configuration. `vach` also has in-built support for multiple compression schemes (LZ4, Snappy and Brotli), [data signing](https://github.com/dalek-cryptography/ed25519-dalek), leaf [bitflags](https://docs.rs/vach/latest/vach/archive/struct.Flags.html), [encryption](https://docs.rs/aes-gcm/latest/aes_gcm/) and some degree of archive customization. Check out the `vach` spec at **[spec.txt](https://github.com/zeskeertwee/vach/blob/main/spec/main.txt)**. Any and *all* help will be much appreciated, especially proofreading the docs and code review.
## ⛏ Who is this for?
@@ -42,7 +42,7 @@ let archive = Archive::from_handle(source)?;
let resource: Resource = archive.fetch("footstep.wav")?;
// By default all resources are flagged as NOT secure
-assert!(!resource.secured);
+assert!(!resource.authenticated);
// Use the data
use my_crate::Sound;
diff --git a/vach/src/crypto.rs b/vach/src/crypto.rs
index 871581fe..9886514d 100644
--- a/vach/src/crypto.rs
+++ b/vach/src/crypto.rs
@@ -18,7 +18,7 @@ pub(crate) struct Encryptor {
impl fmt::Debug for Encryptor {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- write!(f, "[Vach::Encryptor] cipher: Aes256Gcm, nonce: Nonce",)
+ write!(f, "[Vach::Encryptor] cipher: Aes256Gcm, nonce: {:?}", self.nonce)
}
}
@@ -29,7 +29,7 @@ impl Encryptor {
// Build Nonce
let key = Key::from_slice(bytes);
- let mut v = [178, 5, 239, 228, 165, 44, 169].to_vec();
+ let mut v = vec![178, 5, 239, 228, 165, 44, 169];
v.extend_from_slice(&magic);
Encryptor {
@@ -42,7 +42,7 @@ impl Encryptor {
pub(crate) fn encrypt(&self, data: &[u8]) -> InternalResult> {
let res = match self.cipher.encrypt(&self.nonce, data) {
Ok(data) => data,
- Err(err) => return Err(InternalError::CryptoError(err.to_string())),
+ Err(err) => return Err(InternalError::CryptoError(err)),
};
Ok(res)
@@ -51,7 +51,7 @@ impl Encryptor {
pub(crate) fn decrypt(&self, data: &[u8]) -> InternalResult> {
let res = match self.cipher.decrypt(&self.nonce, data) {
Ok(data) => data,
- Err(err) => return Err(InternalError::CryptoError(err.to_string())),
+ Err(err) => return Err(InternalError::CryptoError(err)),
};
Ok(res)
diff --git a/vach/src/utils/mod.rs b/vach/src/crypto_utils/mod.rs
similarity index 73%
rename from vach/src/utils/mod.rs
rename to vach/src/crypto_utils/mod.rs
index 0b2d9c46..5bf49063 100644
--- a/vach/src/utils/mod.rs
+++ b/vach/src/crypto_utils/mod.rs
@@ -1,11 +1,14 @@
-#[cfg(feature = "crypto")]
-use rand::rngs::OsRng;
-use std::{io::Read};
+#![cfg(feature = "crypto")]
+#![cfg_attr(docsrs, doc(cfg(feature = "crypto")))]
-use crate::global::{error::InternalError, result::InternalResult};
-
-#[cfg(feature = "crypto")]
-use crate::crypto;
+use {
+ rand::rngs::OsRng,
+ crate::{
+ crypto,
+ global::{error::InternalError, result::InternalResult},
+ },
+ std::{io::Read},
+};
// A favour
#[cfg(feature = "compression")]
@@ -13,7 +16,6 @@ pub use super::global::compressor::Compressor;
/// Use this function to easily generate a [Keypair](https://docs.rs/ed25519-dalek/latest/ed25519_dalek/struct.Keypair.html) using `OsRng`
#[inline(always)]
-#[cfg(feature = "crypto")]
pub fn gen_keypair() -> crypto::Keypair {
crypto::Keypair::generate(&mut OsRng)
}
@@ -21,8 +23,6 @@ pub fn gen_keypair() -> crypto::Keypair {
/// Use this to read and parse a `Keypair` from a read stream
/// ### Errors
/// - If the data can't be parsed into a keypair
-#[cfg(feature = "crypto")]
-#[cfg_attr(docsrs, doc(cfg(feature = "crypto")))]
pub fn read_keypair(mut handle: R) -> InternalResult {
let mut keypair_bytes = [0; crate::KEYPAIR_LENGTH];
handle.read_exact(&mut keypair_bytes)?;
@@ -38,8 +38,6 @@ pub fn read_keypair(mut handle: R) -> InternalResult {
/// ### Errors
/// - If parsing of the public key fails
/// - `io` errors
-#[cfg(feature = "crypto")]
-#[cfg_attr(docsrs, doc(cfg(feature = "crypto")))]
pub fn read_public_key(mut handle: T) -> InternalResult {
let mut keypair_bytes = [0; crate::PUBLIC_KEY_LENGTH];
@@ -47,7 +45,7 @@ pub fn read_public_key(mut handle: T) -> InternalResult Ok(pk),
- Err(err) => Err(InternalError::ValidationError(err.to_string())),
+ Err(err) => Err(InternalError::ParseError(err.to_string())),
}
}
/// Read and parse a secret key from a read stream
@@ -55,8 +53,6 @@ pub fn read_public_key(mut handle: T) -> InternalResult(mut handle: T) -> InternalResult {
let mut secret_bytes = [0; crate::SECRET_KEY_LENGTH];
@@ -64,6 +60,6 @@ pub fn read_secret_key(mut handle: T) -> InternalResult Ok(sk),
- Err(err) => Err(InternalError::ValidationError(err.to_string())),
+ Err(err) => Err(InternalError::ParseError(err.to_string())),
}
}
diff --git a/vach/src/global/compressor.rs b/vach/src/global/compressor.rs
index 289615db..b5f5c23e 100644
--- a/vach/src/global/compressor.rs
+++ b/vach/src/global/compressor.rs
@@ -1,10 +1,7 @@
#![cfg(feature = "compression")]
#![cfg_attr(docsrs, doc(cfg(feature = "compression")))]
-use std::{
- fmt::Debug,
- io::{self, Read, Write},
-};
+use std::io::{self, Read, Write};
use crate::prelude::Flags;
use super::{error::InternalError, result::InternalResult};
@@ -26,7 +23,7 @@ impl<'a, T: Read> Compressor {
Compressor { data }
}
/// Pass in a compression algorithm to use, sit back and let the compressor do its job
- pub fn compress(&mut self, algo: CompressionAlgorithm, output: &mut dyn Write) -> InternalResult<()> {
+ pub fn compress(&mut self, algo: CompressionAlgorithm, output: &mut dyn Write) -> InternalResult {
match algo {
CompressionAlgorithm::LZ4 => {
let mut compressor = lz4::frame::FrameEncoder::new(output);
@@ -53,7 +50,7 @@ impl<'a, T: Read> Compressor {
}
}
/// Pass in a compression algorithm to use, sit back and let the decompressor do its job. That is, if the compressed data *is* compressed with the adjacent algorithm
- pub fn decompress(&mut self, algo: CompressionAlgorithm, output: &mut dyn Write) -> InternalResult<()> {
+ pub fn decompress(&mut self, algo: CompressionAlgorithm, output: &mut dyn Write) -> InternalResult {
match algo {
CompressionAlgorithm::LZ4 => {
let mut rdr = lz4::frame::FrameDecoder::new(&mut self.data);
diff --git a/vach/src/global/error.rs b/vach/src/global/error.rs
index fe29dc4d..aa9d336c 100644
--- a/vach/src/global/error.rs
+++ b/vach/src/global/error.rs
@@ -5,61 +5,48 @@ use thiserror::Error;
#[derive(Debug, Error)]
pub enum InternalError {
/// Generic all encompassing error
- /// ```rust
- /// use vach::prelude::InternalError;
- ///
- /// let error = InternalError::OtherError("I love errors, I think they are swell".into());
- /// ```
#[error("[VachError::GenericError] {0}")]
OtherError(Box),
/// Produced when a cargo feature isn't available for a certain action: eg trying to decompress without the compression feature
#[error("[VachError::MissingFeatureError] Unable to continue with operation, the cargo feature ({0}) is missing")]
- MissingFeatureError(String),
+ MissingFeatureError(&'static str),
/// An error that is returned when either a [Keypair](crate::crypto::Keypair), Signature, [PublicKey](crate::crypto::PublicKey) or [SecretKey](crate::crypto::SecretKey) fails to deserialize.
#[error("[VachError::ParseError] {0}")]
ParseError(String),
/// A thin wrapper over [io::Error](std::io::Error), captures all IO errors
#[error("[VachError::IOError] {0}")]
IOError(#[from] io::Error),
- /// Thrown when the loader fails to validate an archive source
- #[error("[VachError::ValidationError] {0}")]
- ValidationError(String),
+ /// Thrown when the archive finds an invalid MAGIC sequence in the given source, hinting at corruption or possible incompatibility with the given source
+ /// You can customize the MAGIC in the [`Builder`](crate::builder::BuilderConfig) and use in the [`ArchiveConfig`](crate::archive::ArchiveConfig)
+ #[error("[VachError::ValidationError] Invalid magic found in Header, possible incompatibility with given source. Magic found {0:?}")]
+ MalformedArchiveSource([u8; crate::MAGIC_LENGTH]),
/// Thrown by `Archive::fetch(---)` when a given resource is not found
- #[error("[VachError::MissingResourceError] {0}")]
+ #[error("[VachError::MissingResourceError] Resource not found: {0}")]
MissingResourceError(String),
/// Thrown when a leaf with an identical ID to a queued leaf is added with the `Builder::add(---)` functions
#[error("[VachError::LeafAppendError] A leaf with the ID: {0} already exists. Consider changing the ID to prevent collisions")]
LeafAppendError(String),
/// Thrown when no `Keypair` is provided and an encrypted [Leaf](crate::builder::Leaf) is encountered
- #[error("[VachError::NoKeypairError] {0}")]
- NoKeypairError(String),
+ #[error("[VachError::NoKeypairError] Unable to continue with cryptographic operation, as no keypair was supplied")]
+ NoKeypairError,
/// Thrown when decryption or encryption fails
+ #[cfg(feature = "crypto")]
#[error("[VachError::CryptoError] {0}")]
- CryptoError(String),
+ CryptoError(aes_gcm::Error),
/// Thrown when an attempt is made to set a bit within the first four bits(restricted) of a [`Flags`](crate::prelude::Flags) instance
#[error("[VachError::RestrictedFlagAccessError] Tried to set reserved bit(s)!")]
RestrictedFlagAccessError,
- /// When a [`Leaf`](crate::builder::Leaf) has an ID that is longer than `crate::MAX_ID_LENGTH`
+ /// When a [`Leaf`](crate::builder::Leaf) has an ID that is longer than `crate::MAX_ID_LENGTH`, contains the overflowing `ID`
#[error("[VachError::IDSizeOverflowError] The maximum size of any ID is: {}. The leaf with ID: {0} has an overflowing ID of length: {}", crate::MAX_ID_LENGTH, .0.len())]
IDSizeOverflowError(String),
- /// An error that is thrown when the current loader attempts to load an incompatible version, contains the incompatible version
- #[error("The provided archive source has version: {}. While the loader has a spec-version: {}. The current loader is incompatible!", .0, crate::VERSION)]
+ /// An error that is thrown when the current archive attempts to load an incompatible version, contains the incompatible version
+ #[error("The provided archive source has version: {}. While the current implementation has a spec-version: {}. The provided source is incompatible!", .0, crate::VERSION)]
IncompatibleArchiveVersionError(u16),
/// An error that is thrown if a `Mutex` is poisoned, when a message doesn't go through an `mpsc::sync_channel` or other sync related issues
#[error("[VachError::SyncError] {0}")]
SyncError(String),
/// Errors thrown during compression or decompression
- #[cfg(feature = "compression")]
#[error("[VachError::CompressorDecompressorError]: {0}")]
- DeCompressionError(String),
-}
-
-#[cfg(feature = "compression")]
-use lz4_flex as lz4;
-
-#[cfg(feature = "compression")]
-impl From for InternalError {
- fn from(err: lz4::frame::Error) -> InternalError {
- InternalError::DeCompressionError(err.to_string())
- }
+ #[cfg(feature = "compression")]
+ DeCompressionError(#[from] lz4_flex::frame::Error),
}
diff --git a/vach/src/global/header.rs b/vach/src/global/header.rs
index 49a688cb..54436b13 100644
--- a/vach/src/global/header.rs
+++ b/vach/src/global/header.rs
@@ -7,7 +7,7 @@ use super::{error::InternalError, result::InternalResult, flags::Flags};
/// Used to configure and give extra information to the [`Archive`](crate::archive::Archive) loader.
/// Used exclusively in archive source and integrity validation.
#[derive(Debug, Clone, Copy)]
-pub struct HeaderConfig {
+pub struct ArchiveConfig {
/// If the archive has a custom magic sequence, pass the custom _MAGIC_ sequence here.
/// The custom _MAGIC_ sequence can then be used to validate archive sources.
pub magic: [u8; crate::MAGIC_LENGTH],
@@ -18,33 +18,33 @@ pub struct HeaderConfig {
pub public_key: Option,
}
-impl HeaderConfig {
- /// Construct a new [`HeaderConfig`] struct.
+impl ArchiveConfig {
+ /// Construct a new [`ArchiveConfig`] struct.
/// ```
- /// use vach::prelude::HeaderConfig;
- /// let config = HeaderConfig::new(*b"_TEST", None);
+ /// use vach::prelude::ArchiveConfig;
+ /// let config = ArchiveConfig::new(*b"_TEST", None);
/// ```
#[inline(always)]
#[cfg(feature = "crypto")]
#[cfg_attr(docsrs, doc(cfg(feature = "crypto")))]
- pub const fn new(magic: [u8; 5], key: Option) -> HeaderConfig {
- HeaderConfig { magic, public_key: key }
+ pub const fn new(magic: [u8; 5], key: Option) -> ArchiveConfig {
+ ArchiveConfig { magic, public_key: key }
}
- /// Construct a new [`HeaderConfig`] struct.
+ /// Construct a new [`ArchiveConfig`] struct.
/// ```
- /// use vach::prelude::HeaderConfig;
- /// let config = HeaderConfig::new(*b"_TEST");
+ /// use vach::prelude::ArchiveConfig;
+ /// let config = ArchiveConfig::new(*b"_TEST");
/// ```
#[cfg(not(feature = "crypto"))]
- pub const fn new(magic: [u8; 5]) -> HeaderConfig {
- HeaderConfig { magic }
+ pub const fn new(magic: [u8; 5]) -> ArchiveConfig {
+ ArchiveConfig { magic }
}
- /// Shorthand to load and parse an ed25519 public key from a `Read` handle, into this `HeaderConfig`,
+ /// Shorthand to load and parse an ed25519 public key from a [`Read`] handle, into this [`ArchiveConfig`],
/// ```
- /// use vach::{utils::gen_keypair, prelude::HeaderConfig};
- /// let mut config = HeaderConfig::default();
+ /// use vach::{crypto_utils::gen_keypair, prelude::ArchiveConfig};
+ /// let mut config = ArchiveConfig::default();
/// let keypair_bytes = gen_keypair().to_bytes();
/// config.load_public_key(&keypair_bytes[32..]).unwrap();
/// ```
@@ -55,29 +55,29 @@ impl HeaderConfig {
#[inline]
#[cfg(feature = "crypto")]
#[cfg_attr(docsrs, doc(cfg(feature = "crypto")))]
- pub fn load_public_key(&mut self, handle: T) -> InternalResult<()> {
- use crate::utils::read_public_key;
+ pub fn load_public_key(&mut self, handle: T) -> InternalResult {
+ use crate::crypto_utils::read_public_key;
self.public_key = Some(read_public_key(handle)?);
Ok(())
}
- /// Shorthand to load a PublicKey into the HeaderConfig
+ /// Shorthand to load a PublicKey into the [ArchiveConfig]
#[cfg(feature = "crypto")]
#[cfg_attr(docsrs, doc(cfg(feature = "crypto")))]
- pub fn key(mut self, public_key: crypto::PublicKey) -> HeaderConfig {
+ pub fn key(mut self, public_key: crypto::PublicKey) -> ArchiveConfig {
self.public_key = Some(public_key);
self
}
- /// Setter for the magic into a HeaderConfig
- pub fn magic(mut self, magic: [u8; 5]) -> HeaderConfig {
+ /// Setter for the magic into a [ArchiveConfig]
+ pub fn magic(mut self, magic: [u8; 5]) -> ArchiveConfig {
self.magic = magic;
self
}
}
-impl fmt::Display for HeaderConfig {
+impl fmt::Display for ArchiveConfig {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
#[rustfmt::skip]
let has_pk = {
@@ -87,7 +87,7 @@ impl fmt::Display for HeaderConfig {
write!(
f,
- "[HeaderConfig] magic: {}, has_public_key: {}",
+ "[ArchiveConfig] magic: {}, has_public_key: {}",
match str::from_utf8(&self.magic) {
Ok(magic) => {
magic
@@ -102,18 +102,18 @@ impl fmt::Display for HeaderConfig {
}
#[cfg(feature = "crypto")]
-impl Default for HeaderConfig {
+impl Default for ArchiveConfig {
#[inline(always)]
fn default() -> Self {
- HeaderConfig::new(*crate::DEFAULT_MAGIC, None)
+ ArchiveConfig::new(*crate::DEFAULT_MAGIC, None)
}
}
#[cfg(not(feature = "crypto"))]
-impl Default for HeaderConfig {
+impl Default for ArchiveConfig {
#[inline(always)]
fn default() -> Self {
- HeaderConfig::new(*crate::DEFAULT_MAGIC)
+ ArchiveConfig::new(*crate::DEFAULT_MAGIC)
}
}
@@ -144,16 +144,13 @@ impl Header {
pub const VERSION_SIZE: usize = 2;
pub const CAPACITY_SIZE: usize = 2;
- /// Validates a `Header` with a template `HeaderConfig`
+ /// Validates a `Header` with a template [ArchiveConfig]
/// ### Errors
/// - (in)validation of magic and archive version
- pub fn validate(config: &HeaderConfig, header: &Header) -> InternalResult<()> {
+ pub fn validate(config: &ArchiveConfig, header: &Header) -> InternalResult {
// Validate magic
if header.magic != config.magic {
- return Err(InternalError::ValidationError(format!(
- "Invalid magic found in Header, possible incompatibility with given source. Magic found {:?}",
- header.magic
- )));
+ return Err(InternalError::MalformedArchiveSource(header.magic));
};
// Validate version
diff --git a/vach/src/global/reg_entry.rs b/vach/src/global/reg_entry.rs
index 9f526d85..383147c3 100644
--- a/vach/src/global/reg_entry.rs
+++ b/vach/src/global/reg_entry.rs
@@ -1,7 +1,7 @@
use crate::global::flags::Flags;
use std::{io::Read, fmt};
-use super::{error::InternalError, result::InternalResult};
+use super::result::InternalResult;
#[cfg(feature = "crypto")]
use crate::crypto;
@@ -44,7 +44,6 @@ impl RegistryEntry {
/// ### Errors
/// Produces `io` errors and if the bytes in the id section is not valid UTF-8
pub(crate) fn from_handle(mut handle: T) -> InternalResult<(Self, String)> {
- #![allow(clippy::uninit_assumed_init)]
let mut buffer: [u8; RegistryEntry::MIN_SIZE] = [0u8; RegistryEntry::MIN_SIZE];
handle.read_exact(&mut buffer)?;
@@ -69,7 +68,9 @@ impl RegistryEntry {
// If the `crypto` feature is turned off then the bytes are just read then discarded
#[cfg(feature = "crypto")]
{
- let sig: crypto::Signature = match sig_bytes.try_into() {
+ use super::error::InternalError;
+
+ let sig = match crypto::Signature::try_from(sig_bytes) {
Ok(sig) => sig,
Err(err) => return Err(InternalError::ParseError(err.to_string())),
};
diff --git a/vach/src/global/result.rs b/vach/src/global/result.rs
index 251ef322..91124069 100644
--- a/vach/src/global/result.rs
+++ b/vach/src/global/result.rs
@@ -1,4 +1,4 @@
use super::error::InternalError;
/// Internal `Result` type alias used by `vach`. Basically equal to: `Result`
-pub type InternalResult = Result;
+pub type InternalResult = Result;
diff --git a/vach/src/lib.rs b/vach/src/lib.rs
index 50ab0ce5..96ec2928 100644
--- a/vach/src/lib.rs
+++ b/vach/src/lib.rs
@@ -6,7 +6,7 @@
/*!
#### A simple archiving format, designed for storing assets in compact secure containers
-`vach`, pronounced like "puck" but with a "v", is an archiving and resource transmission format. It was built to be secure, contained and protected. It was, in fact, designed by the [SCP](https://en.wikipedia.org/wiki/SCP_Foundation) to keep your anomalous assets compact and secure during transmission. `vach` also has in-built support for multiple compression schemes (LZ4, Snappy and Brolti), [data signing](https://github.com/dalek-cryptography/ed25519-dalek), leaf [bitflags](https://docs.rs/vach/latest/vach/archive/struct.Flags.html), [encryption](https://docs.rs/aes-gcm/latest/aes_gcm/) and some degree of archive customization. Check out the `vach` spec at **[spec.txt](https://github.com/zeskeertwee/virtfs-rs/blob/main/spec/main.txt)**. Any and *all* help will be much appreciated, especially proof reading the docs and code review.
+`vach`, pronounced like "puck" but with a "v", is an archiving and resource transmission format. It was built to be secure, contained and protected. It was, in fact, designed by the [SCP](https://en.wikipedia.org/wiki/SCP_Foundation) to keep your anomalous assets compact and secure during transmission. A big benefit of `vach` is the fine grained control it grants its users, as it allows for per-entry independent configuration. `vach` also has in-built support for multiple compression schemes (LZ4, Snappy and Brotli), [data signing](https://github.com/dalek-cryptography/ed25519-dalek), leaf [bitflags](https://docs.rs/vach/latest/vach/archive/struct.Flags.html), [encryption](https://docs.rs/aes-gcm/latest/aes_gcm/) and some degree of archive customization. Check out the `vach` spec at **[spec.txt](https://github.com/zeskeertwee/vach/blob/main/spec/main.txt)**. Any and *all* help will be much appreciated, especially proof reading the docs and code review.
### 👄 Terminologies
@@ -15,14 +15,10 @@
- **Entry:** Some data in the registry section of a `vach` source on a corresponding [leaf](crate::builder::Leaf). For example, `{ id: footstep.wav, location: 45, offset: 2345, flags: 0b0000_0000_0000_0000u16 }`.
### 🔫 Cargo Features
-- The `multithreaded` feature pulls [rayon](https://crates.io/crates/rayon) as a dependency and adds `Send + Sync` as a trait bound to many generic types.
- This allows for the parallelization of the `Builder::dump(---)` function and adds a new `Archive::fetch_batch(---)` method, with more functions getting parallelization on the way.
-
- > Turning this feature on adds a several new dependencies that would be completely unnecessary for a smaller scope, its only benefits when several entries are required at one moment there can be fetched simultaneously_
-
-- The `compression` feature pulls `snap`, `lz4_flex` and `brotli` as dependencies and allows for compression in `vach` archives.
-- The `loader` and `builder` features are turned on by default, turning them off turns off their respective modules. For example a game only needs the `loader` feature but a tool for asset packing would only need the `builder` feature.
-- The `crypto` feature enables encryption and authentication functionality by pulling the `ed25519_dalek` and `aes_gcm` crates
+- `archive` and `builder` (default): Turning them off turns off their respective modules. For example a game only needs the `archive` feature but a tool for packing assets would only need the `builder` feature.
+- `multithreaded`: Pulls [rayon](https://crates.io/crates/rayon) as a dependency and adds `Send + Sync` as a trait bound to many generic types. This allows for the auto-parallelization of the `Builder::dump(---)` function.
+- `compression`: Pulls `snap`, `lz4_flex` and `brotli` as dependencies and allows for compression in `vach` archives.
+- `crypto`: Enables encryption and authentication functionality by pulling the `ed25519_dalek` and `aes_gcm` crates
### 🀄 Show me some code _dang it!_
@@ -36,9 +32,10 @@ let config = BuilderConfig::default();
let mut builder = Builder::default();
// Use `Builder::add( reader, ID )` to add data to the write queue
-// builder.add(File::open("test_data/background.wav")?, "ambient").unwrap();
-// builder.add(File::open("test_data/footstep.wav")?, "ftstep").unwrap();
-builder.add(Cursor::new(b"Hello, Cassandra!"), "hello").unwrap();
+// Adds any data that implements `io::Read`
+builder.add(File::open("test_data/background.wav")?, "ambient").unwrap();
+builder.add(&[12, 23, 34, 45, 56, 67, 78, 90, 69], "ftstep").unwrap();
+builder.add(b"Hello, Cassandra!", "hello").unwrap();
// let mut target = File::create("sounds.vach")?;
let mut target = Cursor::new(Vec::new());
@@ -57,9 +54,9 @@ let target = File::open("sounds.vach")?;
let archive = Archive::from_handle(target)?;
let resource: Resource = archive.fetch("ambient")?;
-// By default all resources are flagged as NOT secured
+// By default all resources are flagged as NOT authenticated
println!("{}", Sound::new(&resource.data)?);
-assert!(!resource.secured);
+assert!(!resource.authenticated);
let mut buffer = Vec::new();
let (flags, content_version, is_secure) = archive.fetch_write("ftstep", &mut buffer)?;
@@ -70,30 +67,50 @@ let (flags, content_version, is_secure) = archive.fetch_write("ftstep", &mut buf
```ignore
use std::{io::Cursor, fs::File};
use vach::prelude::{Builder, BuilderConfig, Keypair};
-use vach::utils::gen_keypair;
+use vach::crypto_utils::gen_keypair;
let keypair: Keypair = gen_keypair();
let config: BuilderConfig = BuilderConfig::default().keypair(keypair);
let mut builder = Builder::default();
// Use `Builder::add( reader, ID )` to add data to the write queue
-// builder.add(File::open("test_data/background.wav")?, "ambient").unwrap();
-// builder.add(File::open("test_data/footstep.wav")?, "ftstep").unwrap();
-builder.add(Cursor::new(b"Hello, Cassandra!"), "hello").unwrap();
+builder.add(File::open("test_data/background.wav")?, "ambient").unwrap();
+builder.add(vec![12, 23, 34, 45, 56, 67, 78], "ftstep").unwrap();
+builder.add(b"Hello, Cassandra!" as &[u8], "hello").unwrap();
-// let mut target = File::create("sounds.vach")?;
-let mut target = Cursor::new(Vec::new());
+let mut target = File::create("sounds.vach")?;
+builder.dump(&mut target, &config).unwrap();
+let mut target = Cursor::new(Vec::new());
builder.dump(&mut target, &config).unwrap();
```
+##### > Load resources from a signed `.vach` source
+
+```ignore
+// Load public_key
+let mut public_key = File::open(PUBLIC_KEY)?;
+let mut public_key_bytes: [u8; crate::PUBLIC_KEY_LENGTH];
+public_key.read_exact(&mut public_key_bytes)?;
+
+// Build the Loader config
+let mut config = ArchiveConfig::default().key(PublicKey::from_bytes(&public_key_bytes)?);
+
+let target = File::open("sounds.vach")?;
+let archive = Archive::with_config(target, &config)?;
+
+// Resources are marked as secure (=true) if the signatures match the data
+let resource = archive.fetch("ambient")?;
+println!("{}", Sound::new(&resource.data)?);
+assert!(resource.authenticated);
+```
##### > Serialize and de-serialize a `Keypair`, `SecretKey` and `PublicKey`
-As `Keypair`, `SecretKey` and `PublicKey` are reflected from [ed25519_dalek](https://docs.rs/ed25519-dalek/latest/ed25519_dalek/), you could refer to their docs to read further about them.
+As `Keypair`, `SecretKey` and `PublicKey` are reflected from [ed25519_dalek](https://docs.rs/ed25519-dalek/latest/ed25519_dalek/), you could refer to their docs to read further about them. These are needed for any cryptography-related procedures.
```ignore
use vach::prelude::{Keypair, SecretKey, PublicKey};
-use vach::utils::gen_keypair;
+use vach::crypto_utils::gen_keypair;
// Generate keys
let keypair : Keypair = gen_keypair();
@@ -103,32 +120,12 @@ let public : PublicKey = keypair.public;
// Serialize
let public_key_bytes : [u8; vach::PUBLIC_KEY_LENGTH] = public.to_bytes();
let secret_key_bytes : [u8; vach::SECRET_KEY_LENGTH] = secret.to_bytes();
-// let keypair_bytes : [u8; vach::KEYPAIR_LENGTH] = keypair.to_bytes();
+let keypair_bytes : [u8; vach::KEYPAIR_LENGTH] = keypair.to_bytes();
// Deserialize
let public_key : PublicKey = PublicKey::from_bytes(&public_key_bytes).unwrap();
let secret_key : SecretKey = SecretKey::from_bytes(&secret_key_bytes).unwrap();
-// let keypair : Keypair = Keypair::from_bytes(&keypair_bytes).unwrap();
-```
-
-##### > Load resources from a signed `.vach` source
-
-```ignore
-// Load public_key
-let mut public_key = File::open(PUBLIC_KEY)?;
-let mut public_key_bytes: [u8; crate::PUBLIC_KEY_LENGTH];
-public_key.read_exact(&mut public_key_bytes)?;
-
-// Build the Loader config
-let mut config = HeaderConfig::default().key(PublicKey::from_bytes(&public_key_bytes)?);
-
-let target = File::open("sounds.vach")?;
-let archive = Archive::with_config(target, &config)?;
-
-// Resources are marked as secure (=true) if the signatures match the data
-let resource = archive.fetch("ambient")?;
-println!("{}", Sound::new(&resource.data)?);
-assert!(resource.secured);
+let keypair : Keypair = Keypair::from_bytes(&keypair_bytes).unwrap();
```
*/
@@ -136,8 +133,8 @@ assert!(resource.secured);
mod tests;
pub(crate) mod global;
-#[cfg(feature = "loader")]
-#[cfg_attr(docsrs, doc(cfg(feature = "loader")))]
+#[cfg(feature = "archive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "archive")))]
pub(crate) mod loader;
#[cfg(feature = "builder")]
@@ -151,7 +148,7 @@ pub use rand;
#[cfg(feature = "multithreaded")]
#[cfg_attr(docsrs, doc(cfg(feature = "multithreaded")))]
-pub use {rayon, num_cpus};
+pub use rayon;
/// Current [`vach`](crate) spec version. increments by ten with every spec change
pub const VERSION: u16 = 30;
@@ -180,13 +177,13 @@ pub const MAGIC_LENGTH: usize = 5;
/// Consolidated import for crate logic; This module stores all `structs` associated with this crate. Constants can be accesses [directly](#constants) with `crate::`
pub mod prelude {
pub use crate::global::{
- error::InternalError, result::InternalResult, flags::Flags, header::HeaderConfig, reg_entry::RegistryEntry,
+ error::InternalError, result::InternalResult, flags::Flags, header::ArchiveConfig, reg_entry::RegistryEntry,
};
#[cfg(feature = "crypto")]
pub use crate::crypto::*;
- #[cfg(feature = "loader")]
+ #[cfg(feature = "archive")]
pub use crate::archive::*;
#[cfg(feature = "builder")]
@@ -213,16 +210,16 @@ pub mod builder {
}
/// Loader-based logic and data-structures
-#[cfg(feature = "loader")]
-#[cfg_attr(docsrs, doc(cfg(feature = "loader")))]
+#[cfg(feature = "archive")]
+#[cfg_attr(docsrs, doc(cfg(feature = "archive")))]
pub mod archive {
pub use crate::loader::{archive::Archive, resource::Resource};
pub use crate::global::{
- reg_entry::RegistryEntry, header::HeaderConfig, error::InternalError, result::InternalResult, flags::Flags,
+ reg_entry::RegistryEntry, header::ArchiveConfig, error::InternalError, result::InternalResult, flags::Flags,
};
#[cfg(feature = "compression")]
pub use crate::global::compressor::CompressionAlgorithm;
}
/// Some utility functions to keep you happy
-pub mod utils;
+pub mod crypto_utils;
diff --git a/vach/src/loader/archive.rs b/vach/src/loader/archive.rs
index 0ac61cda..9f31e576 100644
--- a/vach/src/loader/archive.rs
+++ b/vach/src/loader/archive.rs
@@ -2,7 +2,7 @@ use std::{
str,
io::{self, Read, Seek, SeekFrom, Write},
collections::HashMap,
- sync::{Arc, Mutex},
+ sync::Mutex,
};
use super::resource::Resource;
@@ -10,7 +10,7 @@ use crate::{
global::{
error::InternalError,
flags::Flags,
- header::{Header, HeaderConfig},
+ header::{Header, ArchiveConfig},
reg_entry::RegistryEntry,
result::InternalResult,
},
@@ -24,14 +24,20 @@ use crate::global::compressor::{Compressor, CompressionAlgorithm};
/// A wrapper for loading data from archive sources.
/// It also provides query functions for fetching [`Resource`]s and [`RegistryEntry`]s.
-/// Specify custom `MAGIC` or provide a `PublicKey` for decrypting and authenticating resources using [`HeaderConfig`]
+/// Specify custom `MAGIC` or provide a `PublicKey` for decrypting and authenticating resources using [`ArchiveConfig`].
/// > **A word of advice:**
/// > Does not buffer the underlying handle, so consider wrapping `handle` in a `BufReader`
+#[derive(Debug)]
pub struct Archive {
+ /// Wrapping `handle` in a Mutex means that we only ever lock when reading from the underlying buffer, thus ensuring maximum performance across threads
+ /// Since all the other work is done per thread
+ handle: Mutex,
+
+ // Archive metadata
header: Header,
- handle: Arc>,
entries: HashMap,
+ // Optional parts
#[cfg(feature = "crypto")]
decryptor: Option,
#[cfg(feature = "crypto")]
@@ -40,7 +46,7 @@ pub struct Archive {
impl std::fmt::Display for Archive {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- let total_size = self
+ let bytes = self
.entries
.iter()
.map(|(_, entry)| entry.offset)
@@ -49,128 +55,99 @@ impl std::fmt::Display for Archive {
write!(
f,
- "[Archive Header] Version: {}, Magic: {:?}, Members: {}, Compressed Size: {}B, Header-Flags: <{:#x} : {:#016b}>",
+ "[Archive Header] Version: {}, Magic: {:?}, Members: {}, Compressed Size: {bytes}B, Header-Flags: <{:#x} : {:#016b}>",
self.header.arch_version,
self.header.magic,
self.entries.len(),
- total_size,
self.header.flags.bits,
self.header.flags.bits,
)
}
}
-impl std::fmt::Debug for Archive {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- let mut f = f.debug_struct("Archive");
- f.field("header", &self.header);
- f.field("entries", &self.entries);
-
- #[cfg(feature = "crypto")]
- f.field("key", &self.key);
-
- f.finish()
- }
-}
-
-#[cfg(not(feature = "crypto"))]
-type ProcessDependecies = ();
-#[cfg(feature = "crypto")]
-type ProcessDependecies<'a> = (&'a Option, &'a Option);
-
impl Archive {
/// Consume the [Archive] and return the underlying handle
pub fn into_inner(self) -> InternalResult {
- match Arc::try_unwrap(self.handle) {
- Ok(mutex) => match mutex.into_inner() {
- Ok(inner) => Ok(inner),
- Err(err) => Err(InternalError::SyncError(format!(
- "Trying to consume a poisoned mutex {}",
- err
- ))),
- },
- Err(_) => Err(InternalError::SyncError(
- "Cannot consume this archive as other copy-references (ARC) to it exist".to_string(),
- )),
+ match self.handle.into_inner() {
+ Ok(inner) => Ok(inner),
+ Err(err) => Err(InternalError::SyncError(format!(
+ "Trying to consume a poisoned mutex {}",
+ err
+ ))),
}
}
- /// Helps in parallelized `Resource` fetching
+ // Decompress and|or decrypt the data
#[inline(never)]
- fn process_raw(
- dependencies: ProcessDependecies, independent: (&RegistryEntry, &str, Vec),
- ) -> InternalResult<(Vec, bool)> {
+ fn process(&self, values: (&RegistryEntry, &str, Vec)) -> InternalResult<(Vec, bool)> {
/* Literally the hottest function in the block (🕶) */
- let (entry, id, mut raw) = independent;
+ // buffer_a originally contains the raw data
+ let (entry, id, mut buffer_a) = values;
+ let buffer_b;
let mut is_secure = false;
// Signature validation
// Validate signature only if a public key is passed with Some(PUBLIC_KEY)
#[cfg(feature = "crypto")]
- if let Some(pk) = dependencies.1 {
- let raw_size = raw.len();
+ if let Some(pk) = self.key {
+ let raw_size = buffer_a.len();
// If there is an error the data is flagged as invalid
- raw.extend_from_slice(id.as_bytes());
+ buffer_a.extend_from_slice(id.as_bytes());
if let Some(signature) = entry.signature {
- is_secure = pk.verify_strict(&raw, &signature).is_ok();
+ is_secure = pk.verify_strict(&buffer_a, &signature).is_ok();
}
- raw.truncate(raw_size);
+ buffer_a.truncate(raw_size);
}
// Add read layers
// 1: Decryption layer
if entry.flags.contains(Flags::ENCRYPTED_FLAG) {
#[cfg(feature = "crypto")]
- match dependencies.0 {
+ match self.decryptor.as_ref() {
Some(dc) => {
- raw = dc.decrypt(&raw)?;
- },
- None => {
- return Err(InternalError::NoKeypairError(format!(
- "Encountered encrypted Resource: {} but no decryption key(public key) was provided",
- id
- )))
+ buffer_b = dc.decrypt(&buffer_a)?;
},
+ None => return Err(InternalError::NoKeypairError),
}
#[cfg(not(feature = "crypto"))]
{
- return Err(InternalError::MissingFeatureError("crypto".to_string()));
+ return Err(InternalError::MissingFeatureError("crypto"));
}
+ } else {
+ buffer_b = buffer_a.clone();
}
// 2: Decompression layer
if entry.flags.contains(Flags::COMPRESSED_FLAG) {
#[cfg(feature = "compression")]
{
- let mut buffer = vec![];
+ // Clear data in buffer_a
+ buffer_a.clear();
if entry.flags.contains(Flags::LZ4_COMPRESSED) {
- Compressor::new(raw.as_slice()).decompress(CompressionAlgorithm::LZ4, &mut buffer)?
+ Compressor::new(buffer_b.as_slice()).decompress(CompressionAlgorithm::LZ4, &mut buffer_a)?
} else if entry.flags.contains(Flags::BROTLI_COMPRESSED) {
- Compressor::new(raw.as_slice()).decompress(CompressionAlgorithm::Brotli(0), &mut buffer)?
+ Compressor::new(buffer_b.as_slice()).decompress(CompressionAlgorithm::Brotli(0), &mut buffer_a)?
} else if entry.flags.contains(Flags::SNAPPY_COMPRESSED) {
- Compressor::new(raw.as_slice()).decompress(CompressionAlgorithm::Snappy, &mut buffer)?
+ Compressor::new(buffer_b.as_slice()).decompress(CompressionAlgorithm::Snappy, &mut buffer_a)?
} else {
return InternalResult::Err(InternalError::OtherError(
format!("Unable to determine the compression algorithm used for entry with ID: {id}").into(),
));
};
-
- raw = buffer
}
#[cfg(not(feature = "compression"))]
- return Err(InternalError::MissingFeatureError("compression".to_string()));
+ return Err(InternalError::MissingFeatureError("compression"));
+ } else {
+ buffer_a = buffer_b;
};
- let mut buffer = vec![];
- raw.as_slice().read_to_end(&mut buffer)?;
-
- Ok((buffer, is_secure))
+ Ok((buffer_a, is_secure))
}
}
@@ -182,23 +159,23 @@ where
/// Load an [`Archive`] with the default settings from a source.
/// The same as doing:
/// ```ignore
- /// Archive::with_config(HANDLE, &HeaderConfig::default())?;
+ /// Archive::with_config(HANDLE, &ArchiveConfig::default())?;
/// ```
/// ### Errors
/// - If the internal call to `Archive::with_config(-)` returns an error
#[inline(always)]
pub fn from_handle(handle: T) -> InternalResult> {
- Archive::with_config(handle, &HeaderConfig::default())
+ Archive::with_config(handle, &ArchiveConfig::default())
}
/// Given a read handle, this will read and parse the data into an [`Archive`] struct.
- /// Pass a reference to `HeaderConfig` and it will be used to validate the source and for further configuration.
+ /// Pass a reference to [ArchiveConfig] and it will be used to validate the source and for further configuration.
/// ### Errors
/// - If parsing fails, an `Err(---)` is returned.
/// - The archive fails to validate
/// - `io` errors
/// - If any `ID`s are not valid UTF-8
- pub fn with_config(mut handle: T, config: &HeaderConfig) -> InternalResult> {
+ pub fn with_config(mut handle: T, config: &ArchiveConfig) -> InternalResult> {
// Start reading from the start of the input
handle.seek(SeekFrom::Start(0))?;
@@ -231,7 +208,7 @@ where
Ok(Archive {
header,
- handle: Arc::new(Mutex::new(handle)),
+ handle: Mutex::new(handle),
key: config.public_key,
entries,
decryptor,
@@ -242,7 +219,7 @@ where
{
Ok(Archive {
header,
- handle: Arc::new(Mutex::new(handle)),
+ handle: Mutex::new(handle),
entries,
})
}
@@ -252,21 +229,19 @@ where
pub(crate) fn fetch_raw(&self, entry: &RegistryEntry) -> InternalResult> {
let mut buffer = Vec::with_capacity(entry.offset as usize);
- {
- let mut guard = match self.handle.lock() {
- Ok(guard) => guard,
- Err(_) => {
- return Err(InternalError::SyncError(
- "The Mutex in this Archive has been poisoned, an error occurred somewhere".to_string(),
- ))
- },
- };
+ let mut guard = match self.handle.lock() {
+ Ok(guard) => guard,
+ Err(_) => {
+ return Err(InternalError::SyncError(
+ "The Mutex in this Archive has been poisoned, an error occurred somewhere".to_string(),
+ ))
+ },
+ };
- guard.seek(SeekFrom::Start(entry.location))?;
- let mut take = guard.by_ref().take(entry.offset);
+ guard.seek(SeekFrom::Start(entry.location))?;
+ let mut take = guard.by_ref().take(entry.offset);
- take.read_to_end(&mut buffer)?;
- }
+ take.read_to_end(&mut buffer)?;
Ok(buffer)
}
@@ -274,10 +249,7 @@ where
/// Fetch a [`RegistryEntry`] from this [`Archive`].
/// This can be used for debugging, as the [`RegistryEntry`] holds information on data with the adjacent ID.
pub fn fetch_entry(&self, id: impl AsRef<str>) -> Option<RegistryEntry> {
- match self.entries.get(id.as_ref()) {
- Some(entry) => Some(entry.clone()),
- None => None,
- }
+ self.entries.get(id.as_ref()).cloned()
}
/// Returns an immutable reference to the underlying [`HashMap`]. This hashmap stores [`RegistryEntry`] values and uses `String` keys.
@@ -298,7 +270,7 @@ where
T: Read + Seek,
{
/// Fetch a [`Resource`] with the given `ID`.
- /// If the `ID` does not exist within the source, `Err(---)` is returned.
+ /// If the `ID` does not exist within the source, [`InternalError::MissingResourceError`] is returned.
pub fn fetch(&self, id: impl AsRef<str>) -> InternalResult<Resource> {
// The reason for this function's unnecessary complexity is it uses the provided functions independently, thus preventing an unnecessary allocation [MAYBE TOO MUCH?]
if let Some(entry) = self.fetch_entry(&id) {
@@ -307,125 +279,37 @@ where
// Prepare contextual variables
let independent = (&entry, id.as_ref(), raw);
- #[cfg(feature = "crypto")]
- let dependencies = (&self.decryptor, &self.key);
- #[cfg(not(feature = "crypto"))]
- let dependencies = ();
-
- let (buffer, is_secure) = Archive::::process_raw(dependencies, independent)?;
+ // Decompress and|or decrypt the data
+ let (buffer, is_secure) = self.process(independent)?;
Ok(Resource {
content_version: entry.content_version,
flags: entry.flags,
data: buffer,
- secured: is_secure,
+ authenticated: is_secure,
})
} else {
- #[rustfmt::skip]
- return Err(InternalError::MissingResourceError(format!( "Resource not found: {}", id.as_ref() )));
+ return Err(InternalError::MissingResourceError(id.as_ref().to_string()));
}
}
/// Fetch data with the given `ID` and write it directly into the given `target: impl Read`.
/// Returns a tuple containing the `Flags`, `content_version` and `authenticity` (boolean) of the data.
- /// ### Errors
- /// - If no leaf with the specified `ID` exists
- /// - Any `io::Seek(-)` errors
- /// - Other `io` related errors
- pub fn fetch_write(&self, id: impl AsRef<str>, mut target: &mut dyn Write) -> InternalResult<(Flags, u8, bool)> {
+ /// If no leaf with the specified `ID` exists, [`InternalError::MissingResourceError`] is returned.
+ pub fn fetch_write(&self, id: impl AsRef<str>, target: &mut dyn Write) -> InternalResult<(Flags, u8, bool)> {
if let Some(entry) = self.fetch_entry(&id) {
let raw = self.fetch_raw(&entry)?;
// Prepare contextual variables
let independent = (&entry, id.as_ref(), raw);
- #[cfg(feature = "crypto")]
- let dependencies = (&self.decryptor, &self.key);
- #[cfg(not(feature = "crypto"))]
- let dependencies = ();
-
- let (buffer, is_secure) = Archive::::process_raw(dependencies, independent)?;
+ // Decompress and|or decrypt the data
+ let (buffer, is_secure) = self.process(independent)?;
- io::copy(&mut buffer.as_slice(), &mut target)?;
+ io::copy(&mut buffer.as_slice(), target)?;
Ok((entry.flags, entry.content_version, is_secure))
} else {
- #[rustfmt::skip]
- return Err(InternalError::MissingResourceError(format!( "Resource not found: {}", id.as_ref() )));
- }
- }
-}
-
-#[cfg(feature = "multithreaded")]
-impl Archive
-where
- T: Read + Seek + Send + Sync,
-{
- /// Retrieves several resources in parallel. This is much faster than calling `Archive::fetch(---)` in a loop as it utilizes abstracted functionality.
- /// Use `Archive::fetch(---)` | `Archive::fetch_write(---)` in your own loop construct ([rayon] if you want) otherwise
- #[cfg_attr(docsrs, doc(cfg(feature = "multithreaded")))]
- pub fn fetch_batch(
- &self, items: I, num_threads: Option,
- ) -> InternalResult>>
- where
- I: Iterator + Send + Sync,
- I::Item: AsRef,
- {
- use rayon::prelude::*;
-
- // Attempt to pre-allocate HashMap
- let map = match items.size_hint().1 {
- Some(hint) => HashMap::with_capacity(hint),
- None => HashMap::new(),
- };
-
- // Prepare mutexes
- let processed = Arc::new(Mutex::new(map));
- let queue = Arc::new(Mutex::new(items));
-
- let num_threads = match num_threads {
- Some(num) => num,
- None => num_cpus::get(),
- };
-
- // Creates a thread-pool`
- (0..num_threads)
- .into_par_iter()
- .try_for_each(|_| -> InternalResult<()> {
- let mut produce = vec![];
-
- // Query next item on queue and process
- while let Some(id) = queue.lock().unwrap().next() {
- let id = id.as_ref();
- let resource = self.fetch(id);
-
- let string = id.to_string();
- if let Ok(mut guard) = processed.try_lock() {
- guard.insert(string, resource);
-
- // The lock is available, so we pop everything off the queue
- (0..produce.len()).for_each(|_| {
- let (id, res) = produce.pop().unwrap();
- guard.insert(id, res);
- });
- } else {
- produce.push((string, resource));
- }
- }
-
- Ok(())
- })?;
-
- match Arc::try_unwrap(processed) {
- Ok(mutex) => match mutex.into_inner() {
- Ok(inner) => Ok(inner),
- Err(err) => Err(InternalError::SyncError(format!(
- "Mutex has been poisoned! {}",
- err
- ))),
- },
- Err(_) => Err(InternalError::SyncError(
- "Cannot consume this HashMap as other references (ARC) to it exist".to_string(),
- )),
+ return Err(InternalError::MissingResourceError(id.as_ref().to_string()));
}
}
}
diff --git a/vach/src/loader/resource.rs b/vach/src/loader/resource.rs
index 4f2af19a..5b8b731a 100644
--- a/vach/src/loader/resource.rs
+++ b/vach/src/loader/resource.rs
@@ -7,6 +7,7 @@ use crate::{
/// Contains `data`, `flags` and `content_version` fields.
/// Is returned by [`archive.fetch(...)`](crate::archive::Archive)
#[non_exhaustive]
+#[derive(Debug)]
pub struct Resource {
/// The processed data, stored as a vector of bytes `Vec<u8>`.
pub data: Vec<u8>,
@@ -14,20 +15,9 @@ pub struct Resource {
pub flags: Flags,
/// The content version of the extracted archive entry
pub content_version: u8,
- /// If a [`Resource`] signature has checked for authenticity, corruption or obsolescence, then this value becomes false.
- /// By default a [`Resource`] is insecure
- pub secured: bool,
-}
-
-impl fmt::Debug for Resource {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_struct("Resource")
- .field("data", &self.data)
- .field("flags", &self.flags)
- .field("content_version", &self.content_version)
- .field("secured", &self.secured)
- .finish()
- }
+ /// A [`Resource`] is checked for authenticity, corruption or obsolescence against its signature.
+ /// If the checks pass, then this becomes true; it is always false if the `crypto` feature is off or if the data had no signature.
+ pub authenticated: bool,
}
impl fmt::Display for Resource {
diff --git a/vach/src/tests/mod.rs b/vach/src/tests/mod.rs
index 9d609b56..743fef3e 100644
--- a/vach/src/tests/mod.rs
+++ b/vach/src/tests/mod.rs
@@ -22,8 +22,8 @@ const CUSTOM_FLAG_3: u32 = 0b0000_0000_0000_0000_0000_0000_1000_0000;
const CUSTOM_FLAG_4: u32 = 0b0000_0000_0000_0000_0000_0000_0001_0000;
#[test]
-#[cfg(feature = "loader")]
-fn custom_bitflags() -> InternalResult<()> {
+#[cfg(feature = "archive")]
+fn custom_bitflags() -> InternalResult {
let target = File::open(SIMPLE_TARGET)?;
let archive = Archive::from_handle(target)?;
let entry = archive.fetch_entry("poem").unwrap();
@@ -73,13 +73,13 @@ fn flags_set_intersects() {
}
#[test]
-#[cfg(all(feature = "builder", feature = "loader"))]
+#[cfg(all(feature = "builder", feature = "archive"))]
fn defaults() {
// The reason we are pulling the header directly from global namespace is because it's not exposed to the public API
// We still need to conduct tests on them tho.
use crate::global::header::Header;
- let _header_config = HeaderConfig::default();
+ let _header_config = ArchiveConfig::default();
let _header = Header::default();
let _registry_entry = RegistryEntry::empty();
let _leaf = Leaf::default();
@@ -90,13 +90,13 @@ fn defaults() {
#[test]
#[cfg(not(feature = "crypto"))]
-fn header_config() -> InternalResult<()> {
+fn header_config() -> InternalResult {
// `Header` is a private struct, ie pub(crate). So we need to grab it manually
use std::io::Read;
use crate::global::header::Header;
- // When "crypto" features is turned off `HeaderConfig::new(*b"VfACH")` takes a single argument
- let config = HeaderConfig::new(*b"VfACH");
+ // When "crypto" features is turned off `ArchiveConfig::new(*b"VfACH")` takes a single argument
+ let config = ArchiveConfig::new(*b"VfACH");
println!("{}", &config);
let mut header_data = [0u8; Header::BASE_SIZE];
@@ -104,15 +104,13 @@ fn header_config() -> InternalResult<()> {
file.read(&mut header_data)?;
let header = Header::from_handle(header_data.as_slice())?;
- println!("{}", header);
-
- Header::validate(&header, &config)?;
+ Header::validate(&config, &header)?;
Ok(())
}
#[test]
#[cfg(all(feature = "compression", feature = "builder"))]
-fn builder_no_signature() -> InternalResult<()> {
+fn builder_no_signature() -> InternalResult {
let mut builder = Builder::default();
let build_config = BuilderConfig::default();
@@ -145,8 +143,8 @@ fn builder_no_signature() -> InternalResult<()> {
}
#[test]
-#[cfg(all(feature = "compression", feature = "loader"))]
-fn simple_fetch() -> InternalResult<()> {
+#[cfg(all(feature = "compression", feature = "archive"))]
+fn simple_fetch() -> InternalResult {
let target = File::open(SIMPLE_TARGET)?;
let archive = Archive::from_handle(target)?;
let resource = archive.fetch("poem")?;
@@ -161,7 +159,7 @@ fn simple_fetch() -> InternalResult<()> {
assert_eq!(resource.data.len(), 345);
}
- assert!(!resource.secured);
+ assert!(!resource.authenticated);
assert!(resource.flags.contains(Flags::COMPRESSED_FLAG));
println!("{}", String::from_utf8(resource.data).unwrap());
@@ -175,7 +173,7 @@ fn simple_fetch() -> InternalResult<()> {
#[test]
#[cfg(all(feature = "builder", feature = "crypto"))]
-fn builder_with_signature() -> InternalResult<()> {
+fn builder_with_signature() -> InternalResult {
let mut builder = Builder::default();
let cb = |_: &str, entry: &RegistryEntry| {
@@ -200,12 +198,12 @@ fn builder_with_signature() -> InternalResult<()> {
}
#[test]
-#[cfg(all(feature = "loader", feature = "crypto"))]
-fn fetch_with_signature() -> InternalResult<()> {
+#[cfg(all(feature = "archive", feature = "crypto"))]
+fn fetch_with_signature() -> InternalResult {
let target = File::open(SIGNED_TARGET)?;
// Load keypair
- let mut config = HeaderConfig::default();
+ let mut config = ArchiveConfig::default();
let keypair = &KEYPAIR[crate::SECRET_KEY_LENGTH..];
config.load_public_key(keypair)?;
@@ -216,12 +214,12 @@ fn fetch_with_signature() -> InternalResult<()> {
// The adjacent resource was flagged to not be signed
let not_signed_resource = archive.fetch("not_signed")?;
assert!(!not_signed_resource.flags.contains(Flags::SIGNED_FLAG));
- assert!(!not_signed_resource.secured);
+ assert!(!not_signed_resource.authenticated);
// The adjacent resource was flagged to not be signed
let not_signed_resource = archive.fetch("not_signed")?;
assert!(!not_signed_resource.flags.contains(Flags::SIGNED_FLAG));
- assert!(!not_signed_resource.secured);
+ assert!(!not_signed_resource.authenticated);
// Check authenticity of retrieved data
println!("{}", song);
@@ -236,19 +234,19 @@ fn fetch_with_signature() -> InternalResult<()> {
assert_eq!(song.len(), 1977);
}
- assert!(resource.secured);
+ assert!(resource.authenticated);
assert!(resource.flags.contains(Flags::SIGNED_FLAG));
Ok(())
}
#[test]
-#[cfg(all(feature = "loader", feature = "crypto"))]
-fn fetch_write_with_signature() -> InternalResult<()> {
+#[cfg(all(feature = "archive", feature = "crypto"))]
+fn fetch_write_with_signature() -> InternalResult {
let target = File::open(SIGNED_TARGET)?;
// Load keypair
- let mut config = HeaderConfig::default();
+ let mut config = ArchiveConfig::default();
let keypair = &KEYPAIR[crate::SECRET_KEY_LENGTH..];
config.load_public_key(keypair)?;
@@ -277,8 +275,8 @@ fn fetch_write_with_signature() -> InternalResult<()> {
#[test]
#[cfg(feature = "crypto")]
-fn edcryptor_test() -> InternalResult<()> {
- use crate::utils::gen_keypair;
+fn edcryptor_test() -> InternalResult {
+ use crate::crypto_utils::gen_keypair;
use crate::crypto::Encryptor;
let pk = gen_keypair().public;
@@ -296,7 +294,7 @@ fn edcryptor_test() -> InternalResult<()> {
#[test]
#[cfg(all(feature = "compression", feature = "builder", feature = "crypto"))]
-fn builder_with_encryption() -> InternalResult<()> {
+fn builder_with_encryption() -> InternalResult {
let mut builder = Builder::new().template(Leaf::default().encrypt(true).compress(CompressMode::Never).sign(true));
let mut build_config = BuilderConfig::default();
@@ -321,12 +319,12 @@ fn builder_with_encryption() -> InternalResult<()> {
}
#[test]
-#[cfg(all(feature = "loader", feature = "crypto"))]
-fn fetch_from_encrypted() -> InternalResult<()> {
+#[cfg(all(feature = "archive", feature = "crypto"))]
+fn fetch_from_encrypted() -> InternalResult {
let target = File::open(ENCRYPTED_TARGET)?;
// Load keypair
- let mut config = HeaderConfig::default();
+ let mut config = ArchiveConfig::default();
let public_key = &KEYPAIR[crate::SECRET_KEY_LENGTH..];
config.load_public_key(public_key)?;
@@ -347,7 +345,7 @@ fn fetch_from_encrypted() -> InternalResult<()> {
assert_eq!(song.len(), 1977);
}
- assert!(resource.secured);
+ assert!(resource.authenticated);
assert!(!resource.flags.contains(Flags::COMPRESSED_FLAG));
assert!(resource.flags.contains(Flags::ENCRYPTED_FLAG));
@@ -355,9 +353,9 @@ fn fetch_from_encrypted() -> InternalResult<()> {
}
#[test]
-#[cfg(all(feature = "builder", feature = "loader", feature = "crypto"))]
-fn consolidated_example() -> InternalResult<()> {
- use crate::utils::{gen_keypair, read_keypair};
+#[cfg(all(feature = "builder", feature = "archive", feature = "crypto"))]
+fn consolidated_example() -> InternalResult {
+ use crate::crypto_utils::{gen_keypair, read_keypair};
use std::{io::Cursor, time::Instant};
const MAGIC: &[u8; 5] = b"CSDTD";
@@ -389,7 +387,7 @@ fn consolidated_example() -> InternalResult<()> {
println!("Building took: {}us", then.elapsed().as_micros());
// Load data
- let mut config = HeaderConfig::default().magic(*MAGIC);
+ let mut config = ArchiveConfig::default().magic(*MAGIC);
config.load_public_key(&keypair_bytes[32..])?;
let then = Instant::now();
@@ -411,7 +409,7 @@ fn consolidated_example() -> InternalResult<()> {
#[test]
#[cfg(all(feature = "compression", feature = "builder"))]
-fn test_compressors() -> InternalResult<()> {
+fn test_compressors() -> InternalResult {
use std::io::Cursor;
const INPUT_LEN: usize = 4096;
@@ -469,9 +467,10 @@ fn test_compressors() -> InternalResult<()> {
}
#[test]
-#[cfg(all(feature = "multithreaded", feature = "builder", feature = "loader"))]
-fn test_batch_fetching() -> InternalResult<()> {
- use std::io::Cursor;
+#[cfg(all(feature = "multithreaded", feature = "builder", feature = "archive"))]
+fn test_batch_fetching() -> InternalResult {
+ use std::{io::Cursor, collections::HashMap};
+ use rayon::prelude::*;
// Define input constants
const INPUT_LEN: usize = 8;
@@ -496,7 +495,11 @@ fn test_batch_fetching() -> InternalResult<()> {
builder.dump(&mut target, &BuilderConfig::default())?;
let archive = Archive::from_handle(target)?;
- let mut resources = archive.fetch_batch(ids.iter().map(|id| id.as_str()), None)?;
+ let mut resources = ids
+ .as_slice()
+ .par_iter()
+ .map(|id| (id.as_str(), archive.fetch(&id)))
+ .collect::<HashMap<_, _>>();
// Tests and checks
assert!(resources.get("NON_EXISTENT").is_none());
@@ -507,7 +510,6 @@ fn test_batch_fetching() -> InternalResult<()> {
Err(err) => match err {
InternalError::MissingResourceError(_) => {
resources.remove("ERRORS");
- drop(ids);
},
specific => return Err(specific),
diff --git a/vach/src/writer/builder/config.rs b/vach/src/writer/builder/config.rs
index 8b1e1f39..b55a5c29 100644
--- a/vach/src/writer/builder/config.rs
+++ b/vach/src/writer/builder/config.rs
@@ -1,7 +1,6 @@
-use std::io;
use std::fmt::Debug;
-use crate::global::{flags::Flags, result::InternalResult, reg_entry::RegistryEntry};
+use crate::global::{flags::Flags, reg_entry::RegistryEntry};
#[cfg(feature = "crypto")]
use crate::crypto;
@@ -98,8 +97,8 @@ impl<'a> BuilderConfig<'a> {
/// ### Errors
/// If the call to `::utils::read_keypair()` fails to parse the data from the handle
#[cfg(feature = "crypto")]
- pub fn load_keypair(&mut self, handle: T) -> InternalResult<()> {
- self.keypair = Some(crate::utils::read_keypair(handle)?);
+ pub fn load_keypair(&mut self, handle: T) -> crate::global::result::InternalResult {
+ self.keypair = Some(crate::crypto_utils::read_keypair(handle)?);
Ok(())
}
}
diff --git a/vach/src/writer/builder/mod.rs b/vach/src/writer/builder/mod.rs
index 7872c72d..f8cc84ba 100644
--- a/vach/src/writer/builder/mod.rs
+++ b/vach/src/writer/builder/mod.rs
@@ -1,9 +1,10 @@
use std::io::{BufWriter, Write, Seek, SeekFrom};
use std::collections::HashSet;
use std::path::Path;
+use std::sync::atomic::{AtomicU64, AtomicUsize};
use std::sync::{
Arc, Mutex,
- atomic::{Ordering, AtomicUsize},
+ atomic::{Ordering},
};
mod config;
@@ -50,14 +51,15 @@ impl<'a> Builder<'a> {
}
/// Appends a read handle wrapped in a [`Leaf`] into the processing queue.
- /// The `data` is wrapped in the default [`Leaf`].
+ /// The `data` is wrapped in the default [`Leaf`], without cloning the original data.
/// The second argument is the `ID` with which the embedded data will be tagged
/// ### Errors
/// Returns an `Err(())` if a Leaf with the specified ID exists.
- pub fn add(&mut self, data: D, id: impl AsRef) -> InternalResult<()> {
+ pub fn add(&mut self, data: D, id: impl AsRef) -> InternalResult {
let leaf = Leaf::from_handle(data)
.id(id.as_ref().to_string())
.template(&self.leaf_template);
+
self.add_leaf(leaf)?;
Ok(())
}
@@ -74,7 +76,7 @@ impl<'a> Builder<'a> {
/// ## Errors
/// - Any of the underlying calls to the filesystem fail.
/// - The internal call to `Builder::add_leaf()` returns an error.
- pub fn add_dir(&mut self, path: impl AsRef<Path>, template: Option<&Leaf<'a>>) -> InternalResult<()> {
+ pub fn add_dir(&mut self, path: impl AsRef<Path>, template: Option<&Leaf<'a>>) -> InternalResult {
use std::fs;
let directory = fs::read_dir(path)?;
@@ -104,7 +106,7 @@ impl<'a> Builder<'a> {
/// [`Leaf`]s added directly do not implement data from the [`Builder`]s internal template.
/// ### Errors
/// - Returns an error if a [`Leaf`] with the specified `ID` exists.
- pub fn add_leaf(&mut self, leaf: Leaf<'a>) -> InternalResult<()> {
+ pub fn add_leaf(&mut self, leaf: Leaf<'a>) -> InternalResult {
// Make sure no two leaves are written with the same ID
if !self.id_set.insert(leaf.id.clone()) {
return Err(InternalError::LeafAppendError(leaf.id));
@@ -131,7 +133,7 @@ impl<'a> Builder<'a> {
/// This iterates over all [`Leaf`]s in the processing queue, parses them and writes the bytes out into a the target.
/// Configure the custom *`MAGIC`*, `Header` flags and a [`Keypair`](crate::crypto::Keypair) using the [`BuilderConfig`] struct.
- /// Wraps the `target` in [BufWriter].
+ /// Wraps the `target` in [BufWriter]. Also calls `io::Seek` on the target, so there is no need to call it externally for synchronization.
/// ### Errors
/// - Underlying `io` errors
/// - If the optional compression or compression stages fails
@@ -141,15 +143,12 @@ impl<'a> Builder<'a> {
#[cfg(feature = "multithreaded")]
use rayon::prelude::*;
- // The total amount of bytes written
- let mut total_sync = 0usize;
-
#[allow(unused_mut)]
let mut reg_buffer_sync = Vec::new();
// Calculate the size of the registry and check for [`Leaf`]s that request for encryption
#[allow(unused_mut)]
- let mut leaf_offset_sync =
+ let mut leaf_offset_sync = {
self.leafs
.iter()
.map(|leaf| {
@@ -168,7 +167,8 @@ impl<'a> Builder<'a> {
}
})
.reduce(|l1, l2| l1 + l2)
- .unwrap_or(0) + Header::BASE_SIZE;
+ .unwrap_or(0) + Header::BASE_SIZE
+ } as u64;
// Start at the very start of the file
target.seek(SeekFrom::Start(0))?;
@@ -191,33 +191,25 @@ impl<'a> Builder<'a> {
wtr_sync.write_all(&crate::VERSION.to_le_bytes())?;
wtr_sync.write_all(&(self.leafs.len() as u16).to_le_bytes())?;
- // Update how many bytes have been written
- total_sync += Header::BASE_SIZE;
-
// Configure encryption
#[cfg(feature = "crypto")]
let use_encryption = self.leafs.iter().any(|leaf| leaf.encrypt);
- #[cfg(feature = "crypto")]
- if use_encryption && config.keypair.is_none() {
- return Err(InternalError::NoKeypairError(
- "Leaf encryption error! A leaf requested for encryption, yet no keypair was provided(None)".to_string(),
- ));
- };
-
// Build encryptor
#[cfg(feature = "crypto")]
let encryptor = if use_encryption {
- let keypair = &config.keypair;
-
- Some(Encryptor::new(&keypair.as_ref().unwrap().public, config.magic))
+ if let Some(keypair) = config.keypair.as_ref() {
+ Some(Encryptor::new(&keypair.public, config.magic))
+ } else {
+ return Err(InternalError::NoKeypairError);
+ }
} else {
None
};
// Define all arc-mutexes
- let leaf_offset_arc = Arc::new(AtomicUsize::new(leaf_offset_sync));
- let total_arc = Arc::new(AtomicUsize::new(total_sync));
+ let leaf_offset_arc = Arc::new(AtomicU64::new(leaf_offset_sync));
+ let total_arc = Arc::new(AtomicUsize::new(Header::BASE_SIZE));
let wtr_arc = Arc::new(Mutex::new(wtr_sync));
let reg_buffer_arc = Arc::new(Mutex::new(reg_buffer_sync));
@@ -236,8 +228,8 @@ impl<'a> Builder<'a> {
}
// Populate the archive glob
- iter_mut.try_for_each(|leaf: &mut Leaf<'a>| -> InternalResult<()> {
- let mut entry = leaf.to_registry_entry();
+ iter_mut.try_for_each(|leaf: &mut Leaf<'a>| -> InternalResult {
+ let mut entry: RegistryEntry = leaf.into();
let mut raw = Vec::new();
// Compression comes first
@@ -256,7 +248,7 @@ impl<'a> Builder<'a> {
let mut buffer = Vec::new();
leaf.handle.read_to_end(&mut buffer)?;
- let mut compressed_data = vec![];
+ let mut compressed_data = Vec::new();
Compressor::new(buffer.as_slice()).compress(leaf.compression_algo, &mut compressed_data)?;
if compressed_data.len() <= buffer.len() {
@@ -274,7 +266,7 @@ impl<'a> Builder<'a> {
#[cfg(not(feature = "compression"))]
{
if entry.flags.contains(Flags::COMPRESSED_FLAG) {
- return Err(InternalError::MissingFeatureError("compression".to_string()));
+ return Err(InternalError::MissingFeatureError("compression"));
};
leaf.handle.read_to_end(&mut raw)?;
@@ -291,30 +283,28 @@ impl<'a> Builder<'a> {
}
// Write processed leaf-contents and update offsets within `MutexGuard` protection
- let glob_length = raw.len();
+ let glob_length = raw.len() as u64;
{
// Lock writer
- let wtr_arc = Arc::clone(&wtr_arc);
let mut wtr = wtr_arc.lock().unwrap();
// Lock leaf_offset
- let leaf_offset_arc = Arc::clone(&leaf_offset_arc);
let leaf_offset = leaf_offset_arc.load(Ordering::SeqCst);
- wtr.seek(SeekFrom::Start(leaf_offset as u64))?;
+ wtr.seek(SeekFrom::Start(leaf_offset))?;
wtr.write_all(&raw)?;
// Update offset locations
- entry.location = leaf_offset as u64;
+ entry.location = leaf_offset;
leaf_offset_arc.fetch_add(glob_length, Ordering::SeqCst);
// Update number of bytes written
- total_arc.fetch_add(glob_length, Ordering::SeqCst);
+ total_arc.fetch_add(glob_length as usize, Ordering::SeqCst);
};
// Update the offset of the entry to be the length of the glob
- entry.offset = glob_length as u64;
+ entry.offset = glob_length;
#[cfg(feature = "crypto")]
if leaf.sign {
@@ -346,8 +336,7 @@ impl<'a> Builder<'a> {
// Write to the registry-buffer and update total number of bytes written
{
- let arc = Arc::clone(®_buffer_arc);
- let mut reg_buffer = arc.lock().unwrap();
+ let mut reg_buffer = reg_buffer_arc.lock().unwrap();
reg_buffer.write_all(&entry_bytes)?;
total_arc.fetch_add(entry_bytes.len(), Ordering::SeqCst);
@@ -363,10 +352,8 @@ impl<'a> Builder<'a> {
// Write out the contents of the registry
{
- let arc = Arc::clone(&wtr_arc);
- let mut wtr = arc.lock().unwrap();
+ let mut wtr = wtr_arc.lock().unwrap();
- let reg_buffer_arc = Arc::clone(®_buffer_arc);
let reg_buffer = reg_buffer_arc.lock().unwrap();
wtr.seek(SeekFrom::Start(Header::BASE_SIZE as u64))?;
diff --git a/vach/src/writer/config.rs b/vach/src/writer/config.rs
new file mode 100644
index 00000000..2436ee90
--- /dev/null
+++ b/vach/src/writer/config.rs
@@ -0,0 +1,117 @@
+use std::io;
+use std::fmt::Debug;
+
+use crate::global::{flags::Flags, result::InternalResult, reg_entry::RegistryEntry};
+
+#[cfg(feature = "crypto")]
+use crate::crypto;
+
+/// Allows for the customization of valid `vach` archives during their construction.
+/// Such as custom `MAGIC`, custom `Header` flags and signing by providing a keypair.
+pub struct BuilderConfig<'a> {
+ /// Used to write a unique magic sequence into the write target.
+ pub magic: [u8; crate::MAGIC_LENGTH],
+ /// Flags to be written into the `Header` section of the write target.
+ pub flags: Flags,
+ /// An optional keypair. If a key is provided, then the write target will have signatures for tamper verification.
+ #[cfg(feature = "crypto")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "crypto")))]
+ pub keypair: Option<crypto::Keypair>,
+ /// An optional callback that is called every time a [Leaf](crate::builder::Leaf) finishes processing.
+ /// The callback get passed to it: the leaf's id and the generated registry entry. Respectively.
+ /// > **To avoid** the `implementation of "FnOnce" is not general enough` error consider adding types to the closure's parameters, as this is a type inference error. Rust somehow cannot infer enough information, [link](https://www.reddit.com/r/rust/comments/ntqu68/implementation_of_fnonce_is_not_general_enough/).
+ /// Usage:
+ /// ```
+ /// use vach::builder::BuilderConfig;
+ ///
+ /// let builder_config = BuilderConfig::default();
+ /// ```
+ pub progress_callback: Option<&'a (dyn Fn(&str, &RegistryEntry) + Send + Sync)>,
+}
+
+impl<'a> Debug for BuilderConfig<'a> {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ let mut f = f.debug_struct("BuilderConfig");
+
+ f.field("magic", &self.magic);
+ f.field("flags", &self.flags);
+ f.field(
+ "progress_callback",
+ if self.progress_callback.is_some() {
+ &"Some(&dyn Fn(id: &str, reg_entry: &RegistryEntry))"
+ } else {
+ &"None"
+ },
+ );
+
+ #[cfg(feature = "crypto")]
+ f.field("keypair", &self.keypair);
+
+ f.finish()
+ }
+}
+
+impl<'a> BuilderConfig<'a> {
+ // Helper functions
+ /// Setter for the `keypair` field
+ #[cfg(feature = "crypto")]
+ pub fn keypair(mut self, keypair: crypto::Keypair) -> Self {
+ self.keypair = Some(keypair);
+ self
+ }
+
+ /// Setter for the `flags` field
+ ///```
+ /// use vach::prelude::{Flags, BuilderConfig};
+ ///
+ /// let config = BuilderConfig::default().flags(Flags::empty());
+ ///```
+ pub fn flags(mut self, flags: Flags) -> Self {
+ self.flags = flags;
+ self
+ }
+
+ /// Setter for the `magic` field
+ ///```
+ /// use vach::prelude::BuilderConfig;
+ /// let config = BuilderConfig::default().magic(*b"DbAfh");
+ ///```
+ pub fn magic(mut self, magic: [u8; 5]) -> BuilderConfig<'a> {
+ self.magic = magic;
+ self
+ }
+
+ /// Setter for the `progress_callback` field
+ ///```
+ /// use vach::prelude::{BuilderConfig, RegistryEntry};
+ ///
+ /// let callback = |_: &str, entry: &RegistryEntry| { println!("Number of bytes written: {}", entry.offset) };
+ /// let config = BuilderConfig::default().callback(&callback);
+ ///```
+ pub fn callback(mut self, callback: &'a (dyn Fn(&str, &RegistryEntry) + Send + Sync)) -> BuilderConfig<'a> {
+ self.progress_callback = Some(callback);
+ self
+ }
+
+ // Keypair helpers
+ /// Parses and stores a keypair from a source.
+ /// ### Errors
+ /// If the call to `::crypto_utils::read_keypair()` fails to parse the data from the handle
+ #[cfg(feature = "crypto")]
+ pub fn load_keypair<T: io::Read>(&mut self, handle: T) -> InternalResult {
+ self.keypair = Some(crate::crypto_utils::read_keypair(handle)?);
+ Ok(())
+ }
+}
+
+impl<'a> Default for BuilderConfig<'a> {
+ fn default() -> BuilderConfig<'a> {
+ BuilderConfig {
+ flags: Flags::default(),
+ magic: *crate::DEFAULT_MAGIC,
+ progress_callback: None,
+ #[cfg(feature = "crypto")]
+ keypair: None,
+ }
+ }
+}
diff --git a/vach/src/writer/leaf.rs b/vach/src/writer/leaf.rs
index 0a016ccd..2edfc78b 100644
--- a/vach/src/writer/leaf.rs
+++ b/vach/src/writer/leaf.rs
@@ -76,13 +76,6 @@ impl<'a> Leaf<'a> {
self.handle
}
- pub(crate) fn to_registry_entry(&self) -> RegistryEntry {
- let mut entry = RegistryEntry::empty();
- entry.content_version = self.content_version;
- entry.flags = self.flags;
- entry
- }
-
/// Copy all fields from another [`Leaf`], except for `handle` and `id`
/// Meant to be used like a setter:
/// ```rust
@@ -225,3 +218,12 @@ impl<'a> fmt::Debug for Leaf<'a> {
d.finish()
}
}
+
+impl From<&mut Leaf<'_>> for RegistryEntry {
+ fn from(leaf: &mut Leaf<'_>) -> Self {
+ let mut entry = RegistryEntry::empty();
+ entry.content_version = leaf.content_version;
+ entry.flags = leaf.flags;
+ entry
+ }
+}
diff --git a/vach/test_data/signed/target.vach b/vach/test_data/signed/target.vach
index ed4f5756..60cb8656 100644
Binary files a/vach/test_data/signed/target.vach and b/vach/test_data/signed/target.vach differ
diff --git a/vach/test_data/simple/target.vach b/vach/test_data/simple/target.vach
index b4ab9581..d1282ea8 100644
Binary files a/vach/test_data/simple/target.vach and b/vach/test_data/simple/target.vach differ