Commit 1f29e72

chore: clean up
Signed-off-by: chohee <[email protected]>
1 parent 648f289 commit 1f29e72

File tree

12 files changed: +31, -178 lines


dragonfly-client-backend/Cargo.toml

Lines changed: 1 addition & 2 deletions
@@ -12,8 +12,7 @@ edition.workspace = true
 [dependencies]
 dragonfly-client-core.workspace = true
 dragonfly-client-util.workspace = true
-# dragonfly-api.workspace = true
-dragonfly-api = { git = "file:///codes/dragonflyoss/api_fork", branch = "for_client"}
+dragonfly-api.workspace = true
 reqwest.workspace = true
 reqwest-middleware.workspace = true
 tokio.workspace = true

dragonfly-client-storage/Cargo.toml

Lines changed: 1 addition & 2 deletions
@@ -14,8 +14,7 @@ dragonfly-client-core.workspace = true
 dragonfly-client-config.workspace = true
 dragonfly-client-metric.workspace = true
 dragonfly-client-util.workspace = true
-# dragonfly-api.workspace = true
-dragonfly-api = { git = "file:///codes/dragonflyoss/api_fork", branch = "for_client"}
+dragonfly-api.workspace = true
 chrono.workspace = true
 reqwest.workspace = true
 rocksdb.workspace = true

dragonfly-client-storage/src/content.rs

Lines changed: 1 addition & 2 deletions
@@ -1262,7 +1262,6 @@ mod tests {
         // base64 key
         let base64_key = "jqe8buWT8rsfBMYt8mpwSbnjy44WNy/5v1gN1JfFsNk=";
         let key = base64::decode(base64_key).expect("Failed to decode base64 key");
-        println!("key: {:#x?}", key);

         let config = Arc::new(config);
         let content = Content::new(config, temp_dir.path(), Some(key)).await.unwrap();
@@ -1276,7 +1275,7 @@ mod tests {

         let data = b"data";

-        // cal CRC
+        // calculate CRC
         let mut plaintext_hasher = crc32fast::Hasher::new();
         plaintext_hasher.update(data);
         let plaintext_crc = plaintext_hasher.finalize();
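For reference, the CRC comparison this test relies on can be reproduced on its own. A minimal sketch, assuming only the crc32fast crate; the surrounding Content and encryption types are not needed here:

use crc32fast::Hasher;

/// Returns the CRC32 of a byte slice, mirroring the plaintext_hasher steps above.
fn crc32_of(data: &[u8]) -> u32 {
    let mut hasher = Hasher::new();
    hasher.update(data);
    hasher.finalize()
}

fn main() {
    let data = b"data";
    let plaintext_crc = crc32_of(data);
    // The test writes the piece through the encrypting path and checks that the
    // decrypted read-back produces this same plaintext CRC.
    println!("plaintext crc32: {plaintext_crc:#010x}");
}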

dragonfly-client-storage/src/encrypt/algorithm/aes_ctr.rs

Lines changed: 4 additions & 19 deletions
@@ -2,7 +2,7 @@ use aes::Aes256;
 use ctr::Ctr128BE;
 use ctr::cipher::{StreamCipher, KeyIvInit};
 use generic_array::typenum::{U16, U32};
-// use generic_array::GenericArray;
+use generic_array::GenericArray;

 use super::EncryptionAlgorithm;

@@ -12,26 +12,11 @@ impl EncryptionAlgorithm for Aes256Ctr {
     type NonceSize = U16;
     type KeySize = U32;

-    // fn new(key: &[u8], nonce: &[u8]) -> Self {
-    //     // if key.len() != <Self::KeySize as Unsigned>::to_usize() {
-    //     //     panic!("invalid key length");
-    //     // }
-    //     // if nonce.len() != <Self::NonceSize as Unsigned>::to_usize() {
-    //     //     panic!("invalid nonce length");
-    //     // }
-
-    //     let key_array = GenericArray::<u8, Self::KeySize>::from_slice(key);
-    //     let nonce_array = GenericArray::<u8, Self::NonceSize>::from_slice(nonce);
-
-    //     // <Ctr128BE<Aes256> as KeyIvInit>::new(key_array, nonce_array)
-    //     Self::new_from_array(key_array, nonce_array)
-    // }
-
     fn new_from_array(
-        key: &generic_array::GenericArray<u8, Self::KeySize>,
-        nonce: &generic_array::GenericArray<u8, Self::NonceSize>
+        key: &GenericArray<u8, Self::KeySize>,
+        nonce: &GenericArray<u8, Self::NonceSize>
     ) -> Self where Self: Sized {
-        <Ctr128BE<Aes256> as KeyIvInit>::new(key, nonce)
+        <Aes256Ctr as KeyIvInit>::new(key, nonce)
     }

     fn apply_keystream(&mut self, data: &mut [u8]) {
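The cipher behind this impl is the plain RustCrypto CTR construction. A minimal standalone sketch, assuming the aes and ctr crates and that Aes256Ctr aliases Ctr128BE<Aes256> (the local type definition is not shown in this hunk):

// Sketch of the underlying cipher; the key/nonce values are illustrative.
use aes::Aes256;
use ctr::Ctr128BE;
use ctr::cipher::{KeyIvInit, StreamCipher};

type Aes256Ctr = Ctr128BE<Aes256>;

fn main() {
    let key = [0x42u8; 32]; // KeySize = U32
    let nonce = [0u8; 16];  // NonceSize = U16
    let mut data = *b"hello dragonfly";

    // Encrypt in place by XORing the keystream over the buffer.
    let mut cipher = Aes256Ctr::new(&key.into(), &nonce.into());
    cipher.apply_keystream(&mut data);

    // CTR is symmetric: applying the same keystream again restores the plaintext.
    let mut cipher = Aes256Ctr::new(&key.into(), &nonce.into());
    cipher.apply_keystream(&mut data);
    assert_eq!(&data, b"hello dragonfly");
}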
Lines changed: 5 additions & 3 deletions
@@ -1,38 +1,40 @@
 mod aes_ctr;

-use cipher::Unsigned;
 use generic_array::{GenericArray, ArrayLength};
 use hkdf::Hkdf;
 use sha2::Sha256;

 pub use aes_ctr::Aes256Ctr;

+/// EncryptionAlgorithm is a trait that defines the encryption algorithm.
 pub trait EncryptionAlgorithm {
     /// Bytes of key
     type KeySize: ArrayLength<u8>;
     /// Bytes of nonce
     type NonceSize: ArrayLength<u8>;

+    /// new creates a new encryption algorithm from a key and a nonce using u8 slice.
     fn new(key: &[u8], nonce: &[u8]) -> Self where Self: Sized {
         // will panic if length is not fit
         let key_array = GenericArray::<u8, Self::KeySize>::from_slice(key);
         let nonce_array = GenericArray::<u8, Self::NonceSize>::from_slice(nonce);
         Self::new_from_array(key_array, nonce_array)
     }

+    /// new_from_array creates a new encryption algorithm from a key and a nonce using GenericArray.
     fn new_from_array(
         key: &GenericArray<u8, Self::KeySize>,
         nonce: &GenericArray<u8, Self::NonceSize>
     ) -> Self where Self: Sized;

+    /// apply_keystream applies the keystream to the data.
     fn apply_keystream(&mut self, data: &mut [u8]);

+    /// derive_key derives a key from a master key and a task id.
     fn derive_key(master_key: &[u8], task_id: &str) -> GenericArray<u8, Self::KeySize> {
         let hk = Hkdf::<Sha256>::new(Some(task_id.as_bytes()), master_key);
         let mut okm: GenericArray<u8, Self::KeySize> = GenericArray::default();
         hk.expand(b"task-encryption", &mut okm).unwrap();
-        assert_eq!(okm.len(), <Self::KeySize as Unsigned>::to_usize());
-        assert_eq!(okm.len(), master_key.len());
         okm
     }
 }
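The derive_key default above is plain HKDF-SHA256 with the task id as salt and "task-encryption" as the info string. A standalone sketch of the same derivation, assuming the hkdf and sha2 crates and a fixed 32-byte output matching Aes256Ctr's key size:

use hkdf::Hkdf;
use sha2::Sha256;

/// Derives a per-task key from a master key, as in derive_key above.
fn derive_task_key(master_key: &[u8], task_id: &str) -> [u8; 32] {
    let hk = Hkdf::<Sha256>::new(Some(task_id.as_bytes()), master_key);
    let mut okm = [0u8; 32];
    hk.expand(b"task-encryption", &mut okm)
        .expect("32 bytes is a valid HKDF-SHA256 output length");
    okm
}

fn main() {
    let k1 = derive_task_key(b"master", "task-1");
    let k2 = derive_task_key(b"master", "task-2");
    // Different task ids (salts) yield different per-task keys.
    assert_ne!(k1, k2);
}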

dragonfly-client-storage/src/encrypt/cryptor/reader.rs

Lines changed: 3 additions & 15 deletions
@@ -7,28 +7,16 @@ use generic_array::GenericArray;

 use crate::encrypt::{EncryptionAlgorithm, Aes256Ctr};

+/// EncryptReader is a reader that encrypts the data.
 pub struct EncryptReader<R: AsyncRead, A: EncryptionAlgorithm> {
     inner: R,
     cipher: A,
 }

-// impl<R, A: EncryptionAlgorithm> EncryptReader<R, A> {
-//     pub fn new(inner: R, key: &[u8], piece_id: &str) -> Self {
-//         let (task_id, piece_num) = parse_piece_id(piece_id)
-//             .expect("should have task_id and piece_num");
-
-//         let nonce = A::build_nonce(task_id, piece_num);
-//         let cipher = A::new(key, &nonce);
-
-//         Self { inner, cipher }
-//     }
-// }
-
 impl<R: AsyncRead> EncryptReader<R, Aes256Ctr> {
     /// default for Aes256Ctr
     pub fn new(inner: R, key: &[u8], task_id: &str, offset: u64) -> Self {
         let key = <Aes256Ctr as EncryptionAlgorithm>::derive_key(key, task_id);
-        // let nonce = [0u8; <Aes256Ctr as EncryptionAlgorithm>::NONCE_SIZE];
         let zero_nonce = GenericArray::<u8, <Aes256Ctr as EncryptionAlgorithm>::NonceSize>::default();

         let mut cipher = <Aes256Ctr as EncryptionAlgorithm>::new_from_array(&key, &zero_nonce);
@@ -56,7 +44,8 @@ impl<R: AsyncRead + Unpin, A: EncryptionAlgorithm> AsyncRead for EncryptReader<R
     }
 }

-// same for decrypt
+/// DecryptReader is a reader that decrypts the data.
+/// As same as EncryptReader due to symmetric encryption.
 pub type DecryptReader<R, A> = EncryptReader<R, A>;

 #[cfg(test)]
@@ -78,7 +67,6 @@ mod tests {

     #[tokio::test]
     async fn test_encrypt_decrypt_cycle() {
-        // let (key, iv) = generate_key_iv();
         let key = generate_key();

         // Simulate input reader with AsyncCursor
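A hedged usage sketch of the reader pair above, based only on the signatures visible in this diff (EncryptReader::new(inner, key, task_id, offset) and the DecryptReader alias); the import path, key bytes, and task id are illustrative assumptions:

use std::io::Cursor;
use tokio::io::AsyncReadExt;
// Path assumed; adjust to wherever EncryptReader/DecryptReader are re-exported.
use crate::encrypt::cryptor::reader::{DecryptReader, EncryptReader};
use crate::encrypt::Aes256Ctr;

async fn roundtrip() -> std::io::Result<()> {
    let key = [0x24u8; 32]; // illustrative 32-byte master key
    let plaintext = b"piece payload".to_vec();

    // Encrypt while reading from the in-memory source.
    let mut enc = EncryptReader::new(Cursor::new(plaintext.clone()), &key, "task-1", 0);
    let mut ciphertext = Vec::new();
    enc.read_to_end(&mut ciphertext).await?;
    assert_ne!(ciphertext, plaintext);

    // DecryptReader is the same type, so running the same keystream again
    // over the ciphertext restores the plaintext.
    let mut dec: DecryptReader<_, Aes256Ctr> =
        DecryptReader::new(Cursor::new(ciphertext), &key, "task-1", 0);
    let mut recovered = Vec::new();
    dec.read_to_end(&mut recovered).await?;
    assert_eq!(recovered, plaintext);
    Ok(())
}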

dragonfly-client-storage/src/metadata.rs

Lines changed: 0 additions & 2 deletions
@@ -1169,8 +1169,6 @@ impl<E: StorageEngineOwned> Metadata<E> {
             })
             .collect::<Result<Vec<Box<[u8]>>>>()?;

-        info!("query for task [{}] found {} pieces", task_id, pieces.len());
-
         pieces
             .iter()
             .map(|piece| Piece::deserialize_from(piece))

dragonfly-client-storage/src/storage_engine/rocksdb.rs

Lines changed: 0 additions & 124 deletions
@@ -101,7 +101,6 @@ impl RocksdbStorageEngine {

         // Initialize column family options.
         let mut cf_options = rocksdb::Options::default();
-        // TODO
         cf_options.set_prefix_extractor(rocksdb::SliceTransform::create_fixed_prefix(64));
         cf_options.set_memtable_prefix_bloom_ratio(0.25);
         cf_options.optimize_level_style_compaction(Self::DEFAULT_MEMTABLE_MEMORY_BUDGET);
@@ -214,8 +213,6 @@ impl Operations for RocksdbStorageEngine {
         prefix: &[u8],
     ) -> Result<impl Iterator<Item = Result<(Box<[u8]>, Box<[u8]>)>>> {
         let cf = cf_handle::<O>(self)?;
-        // prefix should not shorter than `set_prefix_extractor`
-        // assert!(prefix.len() >= 64);
         Ok(self.prefix_iterator_cf(cf, prefix).map(|ele| {
             let (key, value) = ele.or_err(ErrorType::StorageError)?;
             Ok((key, value))
@@ -645,125 +642,4 @@ mod tests {
             assert!(format!("{:?}", err).contains("ColumnFamilyNotFound"));
         }
     }
-
-    #[test]
-    /// copied from `test_prefix_iter_raw`
-    fn test_prefix_iter_raw_shorter_key_should_fail() {
-        let engine = create_test_engine();
-
-        // RocksDB prefix extractor is configured with fixed_prefix(64) in the open method.
-        let prefix_a = [b'a'; 64];
-        let prefix_b = [b'b'; 64];
-
-
-        // ADD shorter key test
-        let prefix_a_shorter: [u8; 10] = prefix_a[..10].try_into().unwrap();
-
-        println!("prefix_a address: {:p}", &prefix_a);
-        println!("prefix_a_shorter address: {:p}", &prefix_a_shorter);
-        println!("shoter(len: {}): {:#?}",prefix_a_shorter.len(), prefix_a_shorter);
-        // ADD shorter key test
-
-        // Create test keys with 64-byte identical prefixes.
-        let key_a1 = [&prefix_a[..], b"_raw_suffix1"].concat();
-        let key_a2 = [&prefix_a[..], b"_raw_suffix2"].concat();
-
-        let key_b1 = [&prefix_b[..], b"_raw_suffix1"].concat();
-        let key_b2 = [&prefix_b[..], b"_raw_suffix2"].concat();
-
-        let objects_with_prefix_a = vec![
-            (
-                key_a1.clone(),
-                Object {
-                    id: "raw_prefix_id_a1".to_string(),
-                    value: 100,
-                },
-            ),
-            (
-                key_a2.clone(),
-                Object {
-                    id: "raw_prefix_id_a2".to_string(),
-                    value: 200,
-                },
-            ),
-        ];
-
-        let objects_with_prefix_b = vec![
-            (
-                key_b1.clone(),
-                Object {
-                    id: "raw_prefix_id_b1".to_string(),
-                    value: 300,
-                },
-            ),
-            (
-                key_b2.clone(),
-                Object {
-                    id: "raw_prefix_id_b2".to_string(),
-                    value: 400,
-                },
-            ),
-        ];
-
-        for (key, obj) in &objects_with_prefix_a {
-            engine.put::<Object>(key, obj).unwrap();
-        }
-
-        for (key, obj) in &objects_with_prefix_b {
-            engine.put::<Object>(key, obj).unwrap();
-        }
-
-        let retrieved_objects = engine
-            // .prefix_iter_raw::<Object>(&prefix_a[..10])
-            // .prefix_iter_raw::<Object>(&prefix_a)
-            .prefix_iter_raw::<Object>(&prefix_a_shorter)
-            .unwrap()
-            .collect::<Result<Vec<_>>>()
-            .unwrap();
-
-        // Can not seek value
-        assert_eq!(
-            retrieved_objects.len(),
-            objects_with_prefix_a.len(),
-            "expected {} raw objects with prefix 'a', but got {}",
-            objects_with_prefix_a.len(),
-            retrieved_objects.len()
-        );
-
-        // println!("Retrieved objects count: {}", retrieved_objects.len());
-        // for (i, (key, value)) in retrieved_objects.iter().enumerate() {
-        //     println!("Object {}: key={:?}, value_len={}", i, key, value.len());
-        //     if let Ok(obj) = Object::deserialize_from(value) {
-        //         println!(" -> deserialized: id={}, value={}", obj.id, obj.value);
-        //     } else {
-        //         println!(" -> failed to deserialize");
-        //     }
-        // }
-
-        // // Verify each object with prefix can be deserialized from raw bytes.
-        // for (_, object) in &objects_with_prefix_a {
-        //     let found = retrieved_objects
-        //         .iter()
-        //         .any(|(_, v)| match Object::deserialize_from(v) {
-        //             Ok(deserialized) => {
-        //                 deserialized.id == object.id && deserialized.value == object.value
-        //             }
-        //             Err(_) => false,
-        //         });
-
-        //     assert!(
-        //         found,
-        //         "could not find or deserialize object with key {:?}",
-        //         object.id
-        //     );
-        // }
-
-        // // Verify objects with different prefix are not retrieved.
-        // for (key, _) in &objects_with_prefix_b {
-        //     let found = retrieved_objects
-        //         .iter()
-        //         .any(|(k, _)| k.as_ref() == key.as_slice());
-        //     assert!(!found, "found object with different prefix: {:?}", key);
-        // }
-    }
 }
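The removed test exercised prefix iteration with a lookup prefix shorter than the configured extractor; with SliceTransform::create_fixed_prefix(64), prefix seeks are only well-defined for prefixes of at least 64 bytes. A small sketch of that configuration, assuming the rust-rocksdb crate (the database path is illustrative, and iterator items are Results in recent crate versions):

use rocksdb::{Options, SliceTransform, DB};

fn main() -> Result<(), rocksdb::Error> {
    // Mirror the column-family options set in the open method above.
    let mut opts = Options::default();
    opts.create_if_missing(true);
    opts.set_prefix_extractor(SliceTransform::create_fixed_prefix(64));
    opts.set_memtable_prefix_bloom_ratio(0.25);

    let db = DB::open(&opts, "/tmp/prefix-demo")?; // illustrative path
    let prefix = [b'a'; 64];
    db.put([&prefix[..], b"_suffix1"].concat(), b"v1")?;

    // Iterating with the full 64-byte prefix uses the extractor and prefix bloom
    // as intended; a shorter prefix falls outside the extractor's domain.
    for item in db.prefix_iterator(prefix) {
        let (key, value) = item?;
        println!("key len {}, value {:?}", key.len(), value);
    }
    Ok(())
}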

dragonfly-client-util/Cargo.toml

Lines changed: 1 addition & 0 deletions
@@ -12,6 +12,7 @@ edition.workspace = true
 [dependencies]
 dragonfly-client-core.workspace = true
 dragonfly-api.workspace = true
+reqwest.workspace = true
 http-range-header.workspace = true
 http.workspace = true
 tracing.workspace = true

dragonfly-client/Cargo.toml

Lines changed: 1 addition & 2 deletions
@@ -29,8 +29,7 @@ dragonfly-client-storage.workspace = true
 dragonfly-client-backend.workspace = true
 dragonfly-client-metric.workspace = true
 dragonfly-client-util.workspace = true
-# dragonfly-api.workspace = true
-dragonfly-api = { git = "file:///codes/dragonflyoss/api_fork", branch = "for_client"}
+dragonfly-api.workspace = true
 rcgen.workspace = true
 hyper.workspace = true
 hyper-util.workspace = true
