Commit

Refactoring
tomerfiliba committed Aug 6, 2024
1 parent 12d2a9d · commit 2247714
Showing 4 changed files with 45 additions and 47 deletions.
39 changes: 19 additions & 20 deletions src/hashing.rs
@@ -31,9 +31,6 @@ pub(crate) struct PartedHash {

pub(crate) const INVALID_SIG: u32 = 0;

-pub(crate) const USER_NAMESPACE: &[u8] = &[1];
-pub(crate) const TYPED_NAMESPACE: &[u8] = &[2];

impl PartedHash {
#[allow(dead_code)]
pub const LEN: usize = size_of::<u64>();
@@ -60,28 +57,30 @@ impl PartedHash {
}
}

-    #[cfg(test)]
+    #[allow(dead_code)]
pub fn to_u64(&self) -> u64 {
((self.shard_selector as u64) << 48)
| ((self.row_selector as u64) << 32)
| (self.signature as u64)
}

-    // pub fn from_u64(val: u64) -> Self {
-    //     Self {
-    //         shard_selector: (val >> 48) as u16,
-    //         row_selector: (val >> 32) as u16,
-    //         signature: val as u32,
-    //     }
-    // }
-    // pub fn as_bytes(&self) -> [u8; Self::LEN] {
-    //     self.to_u64().to_le_bytes()
-    // }
-    // pub fn from_bytes(b: &[u8]) -> Self {
-    //     assert_eq!(b.len(), Self::LEN);
-    //     let buf: [u8; 8] = [b[0], b[1], b[2], b[3], b[4], b[5], b[6], b[7]];
-    //     Self::from_u64(u64::from_le_bytes(buf))
-    // }
+    #[allow(dead_code)]
+    pub fn to_bytes(&self) -> [u8; Self::LEN] {
+        self.to_u64().to_le_bytes()
+    }
+    #[allow(dead_code)]
+    pub fn from_u64(val: u64) -> Self {
+        Self {
+            shard_selector: (val >> 48) as u16,
+            row_selector: (val >> 32) as u16,
+            signature: val as u32,
+        }
+    }
+    #[allow(dead_code)]
+    pub fn from_bytes(b: &[u8]) -> Self {
+        assert_eq!(b.len(), Self::LEN);
+        let buf: [u8; 8] = [b[0], b[1], b[2], b[3], b[4], b[5], b[6], b[7]];
+        Self::from_u64(u64::from_le_bytes(buf))
+    }
}
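// Illustration only (not part of this commit): a minimal round-trip sketch of the
// 16/16/32 bit layout that to_u64/from_u64 encode above (shard_selector in bits
// 48..64, row_selector in bits 32..48, signature in bits 0..32). The field values
// below are arbitrary.
#[test]
fn parted_hash_roundtrip_sketch() {
    let ph = PartedHash {
        shard_selector: 0x1234,
        row_selector: 0x5678,
        signature: 0x9abc_def0,
    };
    assert_eq!(ph.to_u64(), 0x1234_5678_9abc_def0);
    assert_eq!(PartedHash::from_u64(ph.to_u64()).to_u64(), ph.to_u64());
    assert_eq!(PartedHash::from_bytes(&ph.to_bytes()).to_u64(), ph.to_u64());
}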

#[test]
37 changes: 18 additions & 19 deletions src/shard.rs
@@ -74,10 +74,10 @@ pub(crate) struct ByHashIterator<'a> {
start_idx: usize,
}

-type Entry = (Vec<u8>, Vec<u8>);
+pub(crate) type KVPair = (Vec<u8>, Vec<u8>);

impl<'a> Iterator for ByHashIterator<'a> {
-    type Item = Result<Entry>;
+    type Item = Result<KVPair>;
fn next(&mut self) -> Option<Self::Item> {
while let Some(idx) = self.row.signatures[self.start_idx..]
.iter()
@@ -151,16 +151,22 @@ impl Shard {
Ok(())
}

+    // #[inline]
+    // fn is_special_offset(offset_and_size: u64) -> bool {
+    //     (offset_and_size >> 62) != 0
+    // }
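    // Note (added for clarity, not part of the commit): the shifts below and write_kv
    // further down imply this layout for offset_and_size: bits 48..64 hold the key
    // length (at most 14 bits, with the top two bits apparently reserved to flag
    // "special" entries, per the is_special_offset sketch above and the debug_assert
    // below), bits 32..48 hold the value length, and bits 0..32 hold the file offset.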

#[inline]
fn extract_offset_and_size(offset_and_size: u64) -> (usize, usize, u64) {
let klen = (offset_and_size >> 48) as usize;
debug_assert_eq!(klen >> 14, 0, "attempting to read a special key");
let vlen = ((offset_and_size >> 32) & 0xffff) as usize;
let offset = (offset_and_size as u32) as u64;
(klen, vlen, offset)
}

// reading doesn't require holding any locks - we only ever extend the file, never overwrite data
-    pub(crate) fn read_kv(&self, offset_and_size: u64) -> Result<Entry> {
+    fn read_kv(&self, offset_and_size: u64) -> Result<KVPair> {
let (klen, vlen, offset) = Self::extract_offset_and_size(offset_and_size);

let mut buf = vec![0u8; klen + vlen];
@@ -191,7 +197,7 @@ impl Shard {
Ok(((key.len() as u64) << 48) | ((val.len() as u64) << 32) | write_offset)
}

-    pub(crate) fn read_at(&self, row_idx: usize, entry_idx: usize) -> Option<Result<Entry>> {
+    pub(crate) fn read_at(&self, row_idx: usize, entry_idx: usize) -> Option<Result<KVPair>> {
let _guard = self.row_locks[row_idx].read().unwrap();
let row = &self.header.rows.0[row_idx];
if row.signatures[entry_idx] != INVALID_SIG {
@@ -201,13 +207,15 @@
}
}

-    pub(crate) fn unlocked_iter<'b>(&'b self) -> impl Iterator<Item = Result<Entry>> + 'b {
+    pub(crate) fn unlocked_iter<'b>(&'b self) -> impl Iterator<Item = Result<KVPair>> + 'b {
self.header.rows.0.iter().flat_map(|row| {
-            row.signatures
-                .iter()
-                .enumerate()
-                .filter_map(|(idx, &sig)| (sig != INVALID_SIG).then_some(idx))
-                .map(|idx| self.read_kv(row.offsets_and_sizes[idx]))
+            row.signatures.iter().enumerate().filter_map(|(idx, &sig)| {
+                if sig == INVALID_SIG {
+                    None
+                } else {
+                    Some(self.read_kv(row.offsets_and_sizes[idx]))
+                }
+            })
})
}

@@ -279,15 +287,6 @@ impl Shard {
(guard, row)
}

-    /*pub(crate) fn insert_multikey(
-        &self,
-        keys: &[&[u8]],
-        val: &[u8],
-        mode: InsertMode,
-    ) -> Result<InsertStatus> {
-        self.insert_fullkey(ph, &full_key, val, mode)
-    }*/

pub(crate) fn insert(
&self,
ph: PartedHash,
14 changes: 7 additions & 7 deletions src/store.rs
@@ -8,13 +8,13 @@ use std::{
},
};

-use crate::hashing::PartedHash;
-use crate::{
-    hashing::USER_NAMESPACE,
-    shard::{Shard, NUM_ROWS, ROW_WIDTH},
-};
+use crate::shard::{Shard, NUM_ROWS, ROW_WIDTH};
+use crate::{hashing::PartedHash, shard::KVPair};
use crate::{Config, Result};

+pub(crate) const USER_NAMESPACE: &[u8] = &[1];
+pub(crate) const TYPED_NAMESPACE: &[u8] = &[2];
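// Illustration only (not shown in this diff): these one-byte prefixes are presumably
// prepended to keys before hashing, so that plain user keys and typed-collection keys
// land in disjoint namespaces. Roughly:
//
//     let mut full_key = USER_NAMESPACE.to_vec();
//     full_key.extend_from_slice(user_key);
//
// where user_key is a hypothetical &[u8]; the helper that actually builds the
// namespaced key is not part of this commit's diff.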

/// Stats from VickyStore, mainly useful for debugging
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Stats {
@@ -74,7 +74,7 @@ impl<'a> Iterator for VickyStoreIterator<'a> {
}

impl<'a> Iterator for VickyStoreIterator<'a> {
-    type Item = Result<(Vec<u8>, Vec<u8>)>;
+    type Item = Result<KVPair>;

fn next(&mut self) -> Option<Self::Item> {
let guard = self.db.shards.read().unwrap();
@@ -307,7 +307,7 @@ impl VickyStore {
}

#[allow(dead_code)]
-    pub(crate) fn get_by_hash(&self, ph: PartedHash) -> Vec<Result<(Vec<u8>, Vec<u8>)>> {
+    pub(crate) fn get_by_hash(&self, ph: PartedHash) -> Vec<Result<KVPair>> {
self.shards
.read()
.unwrap()
2 changes: 1 addition & 1 deletion src/typed.rs
@@ -1,8 +1,8 @@
use std::{borrow::Borrow, marker::PhantomData, sync::Arc};

use crate::{
-    hashing::TYPED_NAMESPACE,
    insertion::{GetOrCreateStatus, ReplaceStatus, SetStatus},
+    store::TYPED_NAMESPACE,
VickyStore,
};

