Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Changed MAX_CHUNK_SIZE to const 4MB and removed lazy_static and dereferencing #400

Open
wants to merge 4 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 0 additions & 3 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -21,13 +21,10 @@ python = [
aes = "~0.8.1"
bincode = "~1.3.3"
hex = "~0.4.3"
lazy_static = "1.4.0"
rand = "~0.8.5"
rand_chacha = "~0.3.1"
rayon = "1.5.1"
thiserror = "1.0"
num_cpus = "1.13.0"
itertools = "~0.10.0"
tempfile = "3.6.0"
xor_name = "5.0.0"
pyo3 = { version = "=0.20.3", optional = true, features = ["extension-module"] }
Expand Down
15 changes: 3 additions & 12 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -112,7 +112,6 @@ pub use self::{
stream::{StreamSelfDecryptor, StreamSelfEncryptor},
};
use bytes::Bytes;
use lazy_static::lazy_static;
use std::{
fs::File,
io::{Read, Write},
Expand All @@ -125,16 +124,8 @@ pub use xor_name;

/// The minimum size (before compression) of data to be self-encrypted, defined as 3B.
pub const MIN_ENCRYPTABLE_BYTES: usize = 3 * MIN_CHUNK_SIZE;
/// The default maximum size (before compression) of an individual chunk of a file, defaulting as 1MiB.
const DEFAULT_MAX_CHUNK_SIZE: usize = 1024 * 1024;

lazy_static! {
/// The maximum size (before compression) of an individual chunk of a file, defaulting as 1MiB.
pub static ref MAX_CHUNK_SIZE: usize = std::option_env!("MAX_CHUNK_SIZE")
.unwrap_or("1048576")
.parse::<usize>()
.unwrap_or(DEFAULT_MAX_CHUNK_SIZE);
}
/// The maximum size (before compression) of an individual chunk of a file, defined as 4MiB.
const MAX_CHUNK_SIZE: usize = 4 * 1024 * 1024;

/// The minimum size (before compression) of an individual chunk of a file, defined as 1B.
pub const MIN_CHUNK_SIZE: usize = 1;
Expand Down Expand Up @@ -601,7 +592,7 @@ where
));
}

let mut reader = BufReader::with_capacity(*MAX_CHUNK_SIZE, file);
let mut reader = BufReader::with_capacity(MAX_CHUNK_SIZE, file);
let mut chunk_infos = Vec::with_capacity(num_chunks);

// Ring buffer to hold all source hashes
Expand Down
1 change: 1 addition & 0 deletions src/python.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
#![allow(non_local_definitions)]
use crate::{
decrypt_from_storage as rust_decrypt_from_storage, encrypt_from_file as rust_encrypt_from_file,
streaming_decrypt_from_storage as rust_streaming_decrypt_from_storage, ChunkInfo, DataMap,
Expand Down
2 changes: 1 addition & 1 deletion src/stream.rs
Original file line number Diff line number Diff line change
Expand Up @@ -548,7 +548,7 @@ mod tests {

// Create an invalid chunk with random content
let invalid_chunk = EncryptedChunk {
content: Bytes::from(random_bytes(1024)),
content: random_bytes(1024),
};

// Try to decrypt with invalid chunk
Expand Down
20 changes: 10 additions & 10 deletions src/utils.rs
Original file line number Diff line number Diff line change
Expand Up @@ -62,13 +62,13 @@ pub(crate) fn get_num_chunks(file_size: usize) -> usize {
if file_size < (3 * crate::MIN_CHUNK_SIZE) {
return 0;
}
if file_size < (3 * *crate::MAX_CHUNK_SIZE) {
if file_size < (3 * crate::MAX_CHUNK_SIZE) {
return 3;
}
if file_size % *crate::MAX_CHUNK_SIZE == 0 {
file_size / *crate::MAX_CHUNK_SIZE
if file_size % crate::MAX_CHUNK_SIZE == 0 {
file_size / crate::MAX_CHUNK_SIZE
} else {
(file_size / *crate::MAX_CHUNK_SIZE) + 1
(file_size / crate::MAX_CHUNK_SIZE) + 1
}
}

Expand All @@ -77,7 +77,7 @@ pub(crate) fn get_chunk_size(file_size: usize, chunk_index: usize) -> usize {
if file_size < 3 * crate::MIN_CHUNK_SIZE {
return 0;
}
if file_size < 3 * *crate::MAX_CHUNK_SIZE {
if file_size < 3 * crate::MAX_CHUNK_SIZE {
if chunk_index < 2 {
return file_size / 3;
} else {
Expand All @@ -87,21 +87,21 @@ pub(crate) fn get_chunk_size(file_size: usize, chunk_index: usize) -> usize {
}
let total_chunks = get_num_chunks(file_size);
if chunk_index < total_chunks - 2 {
return *crate::MAX_CHUNK_SIZE;
return crate::MAX_CHUNK_SIZE;
}
let remainder = file_size % *crate::MAX_CHUNK_SIZE;
let remainder = file_size % crate::MAX_CHUNK_SIZE;
let penultimate = (total_chunks - 2) == chunk_index;
if remainder == 0 {
return *crate::MAX_CHUNK_SIZE;
return crate::MAX_CHUNK_SIZE;
}
if remainder < crate::MIN_CHUNK_SIZE {
if penultimate {
*crate::MAX_CHUNK_SIZE - crate::MIN_CHUNK_SIZE
crate::MAX_CHUNK_SIZE - crate::MIN_CHUNK_SIZE
} else {
crate::MIN_CHUNK_SIZE + remainder
}
} else if penultimate {
*crate::MAX_CHUNK_SIZE
crate::MAX_CHUNK_SIZE
} else {
remainder
}
Expand Down
Loading