diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index 692e429..54714ed 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -16,15 +16,9 @@ jobs: - run: rustup update nightly && rustup default nightly - name: Build default run: cargo build --verbose - - name: Build no_std - run: cargo build --verbose --no-default-features --features no_std - - name: Build no_std with error_in_core - run: cargo build --verbose --no-default-features --features no_std,error_in_core - - name: Build with ptr_metadata - run: cargo build --verbose --features ptr_metadata - - name: Build restrictive - run: cargo build --verbose --no-default-features --features std + - name: Build no_std all features + run: cargo build --verbose --no-default-features --features no_std,debug,ptr_metadata,error_in_core - name: Build all features - run: cargo build --verbose --all-features + run: cargo build --verbose --no-default-features --features std,debug,ptr_metadata,error_in_core - name: Run tests run: cargo test --verbose --features ptr_metadata diff --git a/.github/workflows/semver.yml b/.github/workflows/semver.yml new file mode 100644 index 0000000..edb51fe --- /dev/null +++ b/.github/workflows/semver.yml @@ -0,0 +1,19 @@ +name: Semver + +on: + push: + pull_request: + +env: + CARGO_TERM_COLOR: always + +jobs: + build: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + - run: rustup update nightly && rustup default nightly + - run: cargo install cargo-semver-checks + - name: Run semver check + run: cargo semver-checks check-release diff --git a/.vscode/launch.json b/.vscode/launch.json index 7839316..2a15a0c 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -97,6 +97,43 @@ "args": [], "cwd": "${workspaceFolder}" }, + { + "type": "lldb", + "request": "launch", + "name": "Debug example 'game_loading'", + "cargo": { + "args": [ + "build", + "--example=game_loading", + "--package=contiguous-mem" + ], + "filter": { + "name": 
"game_loading", + "kind": "example" + } + }, + "args": [], + "cwd": "${workspaceFolder}" + }, + { + "type": "lldb", + "request": "launch", + "name": "Debug unit tests in example 'game_loading'", + "cargo": { + "args": [ + "test", + "--no-run", + "--example=game_loading", + "--package=contiguous-mem" + ], + "filter": { + "name": "game_loading", + "kind": "example" + } + }, + "args": [], + "cwd": "${workspaceFolder}" + }, { "type": "lldb", "request": "launch", diff --git a/Cargo.toml b/Cargo.toml index 673150b..54b25e6 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "contiguous-mem" -version = "0.3.1" +version = "0.4.0" edition = "2021" description = "A contiguous memory storage" authors = ["Tin Å vagelj "] @@ -19,7 +19,7 @@ portable-atomic = { version = "1", default-features = false, optional = true } spin = { version = "0.9", optional = true } [features] -default = ["std", "leak_data"] +default = ["std", "ptr_metadata"] std = [] no_std = ["dep:portable-atomic", "dep:spin"] debug = [] @@ -27,5 +27,8 @@ leak_data = [] ptr_metadata = [] error_in_core = [] +[dev-dependencies] +byteorder = "1.4" + [package.metadata.docs.rs] all-features = true diff --git a/README.md b/README.md index c016837..3a7edc1 100644 --- a/README.md +++ b/README.md @@ -1,11 +1,21 @@ # contiguous_mem contiguous_mem streamlines storage and management of data stored in contiguous -memory block. +blocks of memory. 
-[![CI](https://github.com/Caellian/contiguous_mem/actions/workflows/rust.yml/badge.svg)](https://github.com/Caellian/contiguous_mem/actions/workflows/rust.yml) -[![Crates.io](https://img.shields.io/crates/v/contiguous_mem)](https://crates.io/crates/contiguous_mem) -[![Documentation](https://docs.rs/contiguous_mem/badge.svg)](https://docs.rs/contiguous_mem) +[![Crate](https://img.shields.io/crates/v/contiguous_mem?style=for-the-badge&logo=docs.rs)](https://crates.io/crates/contiguous_mem) +[![Documentation](https://img.shields.io/docsrs/contiguous-mem?style=for-the-badge&logo=rust)](https://docs.rs/contiguous-mem) +[![CI Status](https://img.shields.io/github/actions/workflow/status/Caellian/contiguous_mem/rust.yml?style=for-the-badge&logo=githubactions&logoColor=%23fff&label=CI)](https://github.com/Caellian/contiguous_mem/actions/workflows/rust.yml) +[![Zlib or MIT or Apache 2.0 license](https://img.shields.io/crates/l/contiguous-mem?style=for-the-badge)](https://github.com/Caellian/contiguous_mem#license) + +## Stability + +All versions prior to 1.0.0 are not considered production ready. This is my +first crate and there's still a lot of edge cases I didn't get a chance to +consider yet. + +Preliminary tests are in place but I don't consider them sufficient to guarantee +correctness of behavior. ## Key Features @@ -23,33 +33,33 @@ safely wrapping referenced data if you don't need it. Default implementation keeps relative offsets of stored data which are resolved on access. -## Tradeoffs +## Use cases -- Works without nightly but leaks data, enable `ptr_metadata` or disable default - `leak_data` feature flag if memory leaks are an issue: +- Ensuring stored data is placed adjacently in memory. ([example](./examples/game_loading.rs)) +- Storing differently typed/sized data. 
([example](./examples/default_impl.rs)) +## Tradeoffs + +- Works without nightly but leaks data requiring Drop or drop glue, enable + `ptr_metadata` or disable default `leak_data` feature flag if memory leaks are + an issue: - `ptr_metadata` requires nightly, - disabling `leak_data` imposes `Copy` requirement on stored types. -- References returned by `store` function follow the same borrow restrictions as the - language, `Deref` is implemented for `ContiguousMemoryRef` but it will panic on - dereference if it's been already mutably borrowed somewhere else. - Use `ContiguousMemoryRef::try_get` if you'd like to handle that properly. - ## Getting Started Add the crate to your dependencies: ```toml [dependencies] -contiguous_mem = { version = "0.3.0" } +contiguous_mem = { version = "0.4.*" } ``` Optionally disable the `std` feature and enable `no_std` feature to use in `no_std` environment: ```toml [dependencies] -contiguous_mem = { version = "0.3.0", default-features = false, features = ["no_std"] } +contiguous_mem = { version = "0.4.*", default-features = false, features = ["no_std"] } ``` ### Features @@ -59,10 +69,13 @@ contiguous_mem = { version = "0.3.0", default-features = false, features = ["no_ - `leak_data` (**default**) - disables `Copy` requirement for stored types, but any references in stored data will be leaked when the memory container is dropped - `debug` - enables `derive(Debug)` on structures unrelated to error handling -- `ptr_metadata` <_nightly_> - enables support for casting returned references - into `dyn Trait` types as well as cleaning up any types that implement `Drop` - or generate drop glue -- `error_in_core` <_nightly_> - enables support for `core::error::Error` in `no_std` environment +- [`ptr_metadata`](https://doc.rust-lang.org/beta/unstable-book/library-features/ptr-metadata.html) + <_nightly_> - enables support for casting returned references into + `dyn Trait` types as well as cleaning up any types that implement `Drop` or + 
generate drop glue +- [`error_in_core`](https://dev-doc.rust-lang.org/stable/unstable-book/library-features/error-in-core.html) + <_nightly_> - enables support for `core::error::Error` in `no_std` + environment ### Usage @@ -80,8 +93,8 @@ fn main() { // Store data in the memory container let data = Data { value: 42 }; - let stored_number: ContiguousMemoryRef = memory.store(22u64); - let stored_data: ContiguousMemoryRef = memory.store(data); + let stored_number: ContiguousMemoryRef = memory.push(22u64); + let stored_data: ContiguousMemoryRef = memory.push(data); // Retrieve and use the stored data assert_eq!(*stored_data.get(), data); @@ -89,9 +102,25 @@ } ``` +- References have a similar API as + [`RefCell`](https://doc.rust-lang.org/stable/std/cell/struct.RefCell.html) + Note that reference types returned by store are inferred and only shown here for demonstration purposes. +## Alternatives + +- manually managing memory to ensure contiguous placement of data + - prone to errors and requires unsafe code +- using a custom allocator like + [blink-alloc](https://crates.io/crates/blink-alloc) to ensure contiguous + placement of data + - requires [`allocator_api`](https://doc.rust-lang.org/beta/unstable-book/library-features/allocator-api.html) + feature + - `blink-alloc` provides a similar functionality as this crate without the + `allocator_api` feature; intended for use in loops so it doesn't support + freeing _some_ values while retaining others + ## Contributions Contributions are welcome, feel free to diff --git a/doc/crate.md b/doc/crate.md index c47fa16..46e1395 100644 --- a/doc/crate.md +++ b/doc/crate.md @@ -20,10 +20,13 @@ See individual items for usage examples. 
- `leak_data` (**default**) - disables `Copy` requirement for stored types, but any references in stored data will be leaked when the memory container is dropped - `debug` - enables `derive(Debug)` on structures unrelated to error handling -- `ptr_metadata` <_nightly_> - enables support for casting returned references - into `dyn Trait` types as well as cleaning up any types that implement `Drop` - or generate drop glue -- `error_in_core` <_nightly_> - enables support for `core::error::Error` in `no_std` environment +- [`ptr_metadata`](https://doc.rust-lang.org/beta/unstable-book/library-features/ptr-metadata.html) + <_nightly_> - enables support for casting returned references into + `dyn Trait` types as well as cleaning up any types that implement `Drop` or + generate drop glue +- [`error_in_core`](https://dev-doc.rust-lang.org/stable/unstable-book/library-features/error-in-core.html) + <_nightly_> - enables support for `core::error::Error` in `no_std` + environment ## Contributions diff --git a/examples/default_impl.rs b/examples/default_impl.rs index f19bc0b..630c6ae 100644 --- a/examples/default_impl.rs +++ b/examples/default_impl.rs @@ -11,8 +11,8 @@ fn main() { // Store data in the memory container let data = Data { value: 42 }; - let stored_number: ContiguousMemoryRef = memory.store(22u64); - let stored_data: ContiguousMemoryRef = memory.store(data); + let stored_number: ContiguousEntryRef = memory.push(22u64); + let stored_data: ContiguousEntryRef = memory.push(data); // Retrieve and use the stored data assert_eq!(*stored_data.get(), data); diff --git a/examples/game_loading.rs b/examples/game_loading.rs new file mode 100644 index 0000000..7f86797 --- /dev/null +++ b/examples/game_loading.rs @@ -0,0 +1,256 @@ +use std::{ + io::{Cursor, ErrorKind, Read, Write}, + mem::align_of, +}; + +use byteorder::{ReadBytesExt, WriteBytesExt, LE}; +use contiguous_mem::prelude::*; + +pub enum IndexOrPtr { + Index(u32), + Ptr(*const T), +} +impl IndexOrPtr { + pub fn 
to_ref(&self, data: &[*const T]) -> IndexOrPtr { + match self { + IndexOrPtr::Index(index) => IndexOrPtr::Ptr(data[*index as usize]), + IndexOrPtr::Ptr(ref ptr) => IndexOrPtr::Ptr(*ptr), + } + } + + pub fn unwrap_ptr(&self) -> *const T { + match self { + IndexOrPtr::Index(_) => panic!("not a pointer"), + IndexOrPtr::Ptr(ptr) => *ptr, + } + } + + pub fn unwrap_ref(&self) -> &'static mut T { + unsafe { &mut *(self.unwrap_ptr() as *mut T) } + } +} + +pub trait Load { + unsafe fn load(data: R) -> Self; +} +pub trait Save { + fn save(&self, data: &mut W) -> Result<(), std::io::Error>; +} + +pub struct Enemy { + pub max_health: u32, + pub health: u32, + pub speed: f32, + pub age: f32, +} +impl Enemy { + pub fn reset(&mut self) { + self.health = self.max_health; + self.age = 0.0; + } +} +impl Load for Enemy { + unsafe fn load(mut data: R) -> Enemy { + Enemy { + max_health: data.read_u32::().unwrap_unchecked(), + health: data.read_u32::().unwrap_unchecked(), + speed: data.read_f32::().unwrap_unchecked(), + age: data.read_f32::().unwrap_unchecked(), + } + } +} +impl Save for Enemy { + fn save(&self, data: &mut W) -> Result<(), std::io::Error> { + data.write_u32::(self.max_health)?; + data.write_u32::(self.health)?; + data.write_f32::(self.speed)?; + data.write_f32::(self.age)?; + Ok(()) + } +} + +pub struct Level { + pub enemies: Vec>, +} + +impl Load for Level { + unsafe fn load(mut data: R) -> Level { + let enemies_count = data.read_u32::().unwrap(); + let enemies = (0..enemies_count).map(|_| { + let enemy_index = data.read_u32::().unwrap(); + IndexOrPtr::Index(enemy_index) + }); + Level { + enemies: enemies.collect(), + } + } +} +impl Save for Level { + fn save(&self, data: &mut W) -> Result<(), std::io::Error> { + data.write_u32::(self.enemies.len() as u32)?; + for enemy in self.enemies.iter() { + match enemy { + IndexOrPtr::Index(index) => data.write_u32::(*index)?, + IndexOrPtr::Ptr(_) => { + return Err(std::io::Error::new( + ErrorKind::InvalidData, + "can't save 
level with references", + )) + } + } + } + Ok(()) + } +} + +// this function emulates FS access for this example, ignore it +fn load_game_file(file_name: &'static str) -> T { + let mut data = Vec::with_capacity(24); + let mut data_cursor = Cursor::new(&mut data); + match file_name { + "enemy1.dat" => Enemy { + max_health: 200, + health: 200, + speed: 2.0, + age: 0.0, + } + .save(&mut data_cursor) + .unwrap(), + "enemy2.dat" => Enemy { + max_health: 200, + health: 200, + speed: 2.0, + age: 0.0, + } + .save(&mut data_cursor) + .unwrap(), + "enemy3.dat" => Enemy { + max_health: 200, + health: 200, + speed: 2.0, + age: 0.0, + } + .save(&mut data_cursor) + .unwrap(), + "enemy4.dat" => Enemy { + max_health: 200, + health: 200, + speed: 2.0, + age: 0.0, + } + .save(&mut data_cursor) + .unwrap(), + "level1.dat" => Level { + enemies: vec![ + IndexOrPtr::Index(0), + IndexOrPtr::Index(1), + IndexOrPtr::Index(1), + IndexOrPtr::Index(2), + ], + } + .save(&mut data_cursor) + .unwrap(), + "level2.dat" => Level { + enemies: vec![ + IndexOrPtr::Index(1), + IndexOrPtr::Index(1), + IndexOrPtr::Index(1), + IndexOrPtr::Index(2), + IndexOrPtr::Index(2), + IndexOrPtr::Index(3), + IndexOrPtr::Index(3), + ], + } + .save(&mut data_cursor) + .unwrap(), + _ => unreachable!(), + }; + data_cursor.set_position(0); + + unsafe { T::load(data_cursor) } +} + +fn main() { + let mut data = UnsafeContiguousMemory::new_aligned(112, align_of::()).unwrap(); + + // Create enemy lookup list. + let enemies: &[*const Enemy] = unsafe { + &[ + data.push(load_game_file("enemy1.dat")).unwrap_unchecked(), + data.push(load_game_file("enemy2.dat")).unwrap_unchecked(), + data.push(load_game_file("enemy3.dat")).unwrap_unchecked(), + data.push(load_game_file("enemy4.dat")).unwrap_unchecked(), + ] + }; + + // Create level lookup list. 
+ let levels: &[*mut Level] = unsafe { + &[ + data.push(load_game_file("level1.dat")).unwrap_unchecked(), + data.push(load_game_file("level2.dat")).unwrap_unchecked(), + ] + }; + + // data won't go out of scope while we're using it in this example, but in + // your use case it might. This is here for completeness. + data.forget(); + // now we can assume all created pointers are 'static + + // prepare levels for use + levels.iter().for_each(|level| { + let level = unsafe { &mut **level }; + + level.enemies = level + .enemies + .iter() + .map(|enemy| enemy.to_ref(enemies)) + .collect(); + }); + + let mut time = 0.0; + let mut current_level: usize = 0; + + // Main game loop + while current_level < levels.len() { + // Simulate the passage of time (you can replace this with your game logic) + time += 1.0; + + let mut all_enemies_killed = true; + let current_lvl = unsafe { &mut *levels[current_level] }; + + for enemy in current_lvl.enemies.iter_mut() { + let enemy_ref = enemy.unwrap_ref(); + + let health_reduction = ((5.0 + time * 0.25) as u32).min(enemy_ref.health); + enemy_ref.health -= health_reduction; + enemy_ref.age += 1.0; + + // Check if the enemy is still alive + if enemy_ref.health > 0 { + all_enemies_killed = false; + } + } + + // If all enemies in the current level are killed, reset them and progress to the next level + if all_enemies_killed { + println!( + "All enemies in level {} have been killed!", + current_level + 1 + ); + current_level += 1; + + // Reset all enemies in the next level + if current_level < levels.len() { + let next_level = unsafe { &mut *levels[current_level] }; + for enemy in next_level.enemies.iter_mut() { + enemy.unwrap_ref().reset(); + } + } + } + } + + println!( + "Congratulations! 
You've completed all levels in: {:.2}", + time + ); +} diff --git a/examples/ptr_metadata.rs b/examples/ptr_metadata.rs index 0d5bf7d..64405a3 100644 --- a/examples/ptr_metadata.rs +++ b/examples/ptr_metadata.rs @@ -22,13 +22,12 @@ impl Greetable for Dog { fn main() { let mut storage = ContiguousMemory::new(4096); - let person1 = storage.store(Person("Joe".to_string())); + let person1 = storage.push(Person("Joe".to_string())); - let person2: ContiguousMemoryRef = - storage.store(Person("Craig".to_string())).into_dyn(); + let person2: ContiguousEntryRef = + storage.push(Person("Craig".to_string())).into_dyn(); - let dog: ContiguousMemoryRef = - storage.store(Dog("Rover".to_string())).into_dyn(); + let dog: ContiguousEntryRef = storage.push(Dog("Rover".to_string())).into_dyn(); person1.get().print_hello(); person2.get().print_hello(); diff --git a/examples/sync_impl.rs b/examples/sync_impl.rs index 0c80462..8bc06b4 100644 --- a/examples/sync_impl.rs +++ b/examples/sync_impl.rs @@ -10,21 +10,21 @@ fn main() { let mut sent_storage = storage.clone(); let writer_one = - std::thread::spawn(move || sent_storage.store(22u64).expect("unable to store number")); + std::thread::spawn(move || sent_storage.push(22u64).expect("unable to store number")); let data = Data { value: 42 }; let mut sent_storage = storage.clone(); let writer_two = std::thread::spawn(move || { sent_storage - .store(Data { value: 42 }) + .push(Data { value: 42 }) .expect("unable to store Data") }); - let stored_number: SyncContiguousMemoryRef = + let stored_number: SyncContiguousEntryRef = writer_one.join().expect("unable to join number thread"); let mut stored_number_clone = stored_number.clone(); - let stored_data: SyncContiguousMemoryRef = + let stored_data: SyncContiguousEntryRef = writer_two.join().expect("unable to join Data thread"); let number_ref = stored_number diff --git a/examples/unsafe_impl.rs b/examples/unsafe_impl.rs index 8591747..233478a 100644 --- a/examples/unsafe_impl.rs +++ 
b/examples/unsafe_impl.rs @@ -13,10 +13,10 @@ fn main() { let data = Data { value: 42 }; let stored_number: *mut u64 = memory - .store(22u64) + .push(22u64) .expect("there should be enough space to store a number"); let stored_data: *mut Data = memory - .store(data) + .push(data) .expect("there should be enough space to store Data"); // Retrieve and use the stored data diff --git a/src/details.rs b/src/details.rs index d936f2a..fa3a4b1 100644 --- a/src/details.rs +++ b/src/details.rs @@ -25,10 +25,10 @@ use std::sync::atomic::{AtomicUsize, Ordering}; use crate::{ error::{ContiguousMemoryError, LockSource, LockingError}, range::ByteRange, - refs::{sealed::*, ContiguousMemoryRef, SyncContiguousMemoryRef}, + refs::{sealed::*, ContiguousEntryRef, SyncContiguousEntryRef}, tracker::AllocationTracker, types::*, - BaseLocation, ContiguousMemoryState, ContiguousMemoryStorage, + BaseLocation, ContiguousMemoryState, }; /// Implementation details shared between [storage](StorageDetails) and @@ -59,7 +59,7 @@ pub trait ImplBase: Sized { pub struct ImplDefault; impl ImplBase for ImplDefault { type StorageState = Rc>; - type ReferenceType = ContiguousMemoryRef; + type ReferenceType = ContiguousEntryRef; type LockResult = T; } @@ -75,7 +75,7 @@ impl ImplBase for ImplDefault { pub struct ImplConcurrent; impl ImplBase for ImplConcurrent { type StorageState = Arc>; - type ReferenceType = SyncContiguousMemoryRef; + type ReferenceType = SyncContiguousEntryRef; type LockResult = Result; const USES_LOCKS: bool = true; @@ -115,7 +115,7 @@ pub trait StorageDetails: ImplBase { fn build_state( base: *mut u8, capacity: usize, - align: usize, + alignment: usize, ) -> Result; /// Dereferences the inner state smart pointer and returns it by reference. @@ -124,6 +124,9 @@ pub trait StorageDetails: ImplBase { /// Retrieves the base pointer from the base instance. fn get_base(base: &Self::Base) -> Self::LockResult<*mut u8>; + /// Retrieves the base pointer from the base instance. 
Non blocking version. + fn try_get_base(base: &Self::Base) -> Self::LockResult<*mut u8>; + /// Retrieves the capacity from the state. fn get_capacity(capacity: &Self::SizeType) -> usize; @@ -147,7 +150,7 @@ pub trait StorageDetails: ImplBase { fn shrink_tracker(state: &mut Self::StorageState) -> Result, LockingError>; /// Finds the next free memory region for given layout in the tracker. - fn store_next( + fn track_next( state: &mut Self::StorageState, layout: Layout, ) -> Result; @@ -169,9 +172,9 @@ impl StorageDetails for ImplConcurrent { fn build_state( base: *mut u8, capacity: usize, - align: usize, + alignment: usize, ) -> Result { - let layout = Layout::from_size_align(capacity, align)?; + let layout = Layout::from_size_align(capacity, alignment)?; Ok(Arc::new(ContiguousMemoryState { base: BaseLocation(RwLock::new(base)), @@ -190,6 +193,11 @@ impl StorageDetails for ImplConcurrent { .map(|result| *result) } + fn try_get_base(base: &Self::Base) -> Self::LockResult<*mut u8> { + base.try_read_named(LockSource::BaseAddress) + .map(|result| *result) + } + fn get_capacity(capacity: &Self::SizeType) -> usize { capacity.load(Ordering::Acquire) } @@ -231,12 +239,13 @@ impl StorageDetails for ImplConcurrent { Ok(lock.shrink_to_fit()) } - fn store_next( + fn track_next( state: &mut Self::StorageState, layout: Layout, ) -> Result { + let base = Self::get_base(&state.base)? 
as usize; let mut lock = state.tracker.lock_named(LockSource::AllocationTracker)?; - lock.take_next(layout) + lock.take_next(base, layout) } fn peek_next( @@ -252,14 +261,14 @@ impl StorageDetails for ImplDefault { type Base = Cell<*mut u8>; type AllocationTracker = RefCell; type SizeType = Cell; - type StoreResult = ContiguousMemoryRef; + type StoreResult = ContiguousEntryRef; fn build_state( base: *mut u8, capacity: usize, - align: usize, + alignment: usize, ) -> Result { - let layout: Layout = Layout::from_size_align(capacity, align)?; + let layout: Layout = Layout::from_size_align(capacity, alignment)?; Ok(Rc::new(ContiguousMemoryState { base: BaseLocation(Cell::new(base)), @@ -277,6 +286,10 @@ impl StorageDetails for ImplDefault { base.get() } + fn try_get_base(base: &Self::Base) -> Self::LockResult<*mut u8> { + Self::get_base(base) + } + fn get_capacity(capacity: &Self::SizeType) -> usize { capacity.get() } @@ -313,15 +326,16 @@ impl StorageDetails for ImplDefault { Ok(state.tracker.borrow_mut().shrink_to_fit()) } - fn store_next( + fn track_next( state: &mut Self::StorageState, layout: Layout, ) -> Result { + let base = Self::get_base(&state.base) as usize; let mut tracker = state .tracker .try_borrow_mut() .map_err(|_| ContiguousMemoryError::TrackerInUse)?; - tracker.take_next(layout) + tracker.take_next(base, layout) } fn peek_next( @@ -345,9 +359,9 @@ impl StorageDetails for ImplUnsafe { fn build_state( base: *mut u8, capacity: usize, - align: usize, + alignment: usize, ) -> Result { - let layout = Layout::from_size_align(capacity, align)?; + let layout = Layout::from_size_align(capacity, alignment)?; Ok(ContiguousMemoryState { base: BaseLocation(base), capacity: layout.size(), @@ -364,6 +378,10 @@ impl StorageDetails for ImplUnsafe { *base } + fn try_get_base(base: &Self::Base) -> Self::LockResult<*mut u8> { + Self::get_base(base) + } + fn get_capacity(capacity: &Self::SizeType) -> usize { *capacity } @@ -400,11 +418,12 @@ impl StorageDetails for 
ImplUnsafe { Ok(state.tracker.shrink_to_fit()) } - fn store_next( + fn track_next( state: &mut Self::StorageState, layout: Layout, ) -> Result { - state.tracker.take_next(layout) + let base = Self::get_base(&state.base) as usize; + state.tracker.take_next(base, layout) } fn peek_next( @@ -467,7 +486,7 @@ impl ReferenceDetails for ImplConcurrent { _addr: *mut T, range: ByteRange, ) -> Self::ReferenceType { - SyncContiguousMemoryRef { + SyncContiguousEntryRef { inner: Arc::new(ReferenceState { state: state.clone(), range: range.clone(), @@ -506,7 +525,7 @@ impl ReferenceDetails for ImplDefault { _addr: *mut T, range: ByteRange, ) -> Self::ReferenceType { - ContiguousMemoryRef { + ContiguousEntryRef { inner: Rc::new(ReferenceState { state: state.clone(), range: range.clone(), @@ -553,7 +572,7 @@ impl ReferenceDetails for ImplUnsafe { } pub trait StoreDataDetails: StorageDetails { - unsafe fn store_data( + unsafe fn push_raw( state: &mut Self::StorageState, data: *mut T, layout: Layout, @@ -566,13 +585,13 @@ pub trait StoreDataDetails: StorageDetails { } impl StoreDataDetails for ImplConcurrent { - unsafe fn store_data( + unsafe fn push_raw( state: &mut Self::StorageState, data: *mut T, layout: Layout, - ) -> Result, LockingError> { + ) -> Result, LockingError> { let (addr, range) = loop { - match ImplConcurrent::store_next(state, layout) { + match ImplConcurrent::track_next(state, layout) { Ok(taken) => { let found = (taken.0 + ImplConcurrent::get_base(&ImplConcurrent::deref_state(state).base)? 
@@ -604,7 +623,7 @@ impl StoreDataDetails for ImplConcurrent { fn assume_stored( state: &Self::StorageState, position: usize, - ) -> Result, LockingError> { + ) -> Result, LockingError> { let addr = unsafe { ImplConcurrent::get_base(&ImplConcurrent::deref_state(state).base)?.add(position) }; @@ -617,13 +636,13 @@ impl StoreDataDetails for ImplConcurrent { } impl StoreDataDetails for ImplDefault { - unsafe fn store_data( + unsafe fn push_raw( state: &mut Self::StorageState, data: *mut T, layout: Layout, - ) -> ContiguousMemoryRef { + ) -> ContiguousEntryRef { let (addr, range) = loop { - match ImplDefault::store_next(state, layout) { + match ImplDefault::track_next(state, layout) { Ok(taken) => { let found = (taken.0 + ImplDefault::get_base(&ImplDefault::deref_state(state).base) as usize) @@ -655,7 +674,7 @@ impl StoreDataDetails for ImplDefault { fn assume_stored( state: &Self::StorageState, position: usize, - ) -> ContiguousMemoryRef { + ) -> ContiguousEntryRef { let addr = unsafe { ImplDefault::get_base(&ImplDefault::deref_state(state).base).add(position) }; ImplDefault::build_ref(state, addr as *mut T, ByteRange(position, size_of::())) @@ -664,13 +683,13 @@ impl StoreDataDetails for ImplDefault { impl StoreDataDetails for ImplUnsafe { /// Returns a raw pointer (`*mut T`) to the stored value or - unsafe fn store_data( + unsafe fn push_raw( state: &mut Self::StorageState, data: *mut T, layout: Layout, ) -> Result<*mut T, ContiguousMemoryError> { let (addr, range) = loop { - match ImplUnsafe::store_next(state, layout) { + match ImplUnsafe::track_next(state, layout) { Ok(taken) => { let found = (taken.0 + ImplUnsafe::get_base(&ImplUnsafe::deref_state(state).base) as usize) @@ -698,47 +717,6 @@ impl StoreDataDetails for ImplUnsafe { } } -/// A deprecated trait for specializing store function across implementations. -/// -/// These store functions are now available directly on ContiguousMemoryStorage -/// and implemented in a sealed module. 
-#[deprecated( - since = "0.3.1", - note = "Use methods available directly on ContiguousMemoryStorage" -)] -pub trait StoreData { - /// See [`ContiguousMemoryStorage::store_data`]. - unsafe fn store_data( - &mut self, - data: *mut T, - layout: Layout, - ) -> Impl::StoreResult; - - /// See [`ContiguousMemoryStorage::assume_stored`]. - unsafe fn assume_stored( - &self, - position: usize, - ) -> Impl::LockResult>; -} - -#[allow(deprecated)] -impl StoreData for ContiguousMemoryStorage { - unsafe fn store_data( - &mut self, - data: *mut T, - layout: Layout, - ) -> Impl::StoreResult { - Impl::store_data(&mut self.inner, data, layout) - } - - unsafe fn assume_stored( - &self, - position: usize, - ) -> Impl::LockResult> { - Impl::assume_stored(&self.inner, position) - } -} - /// Trait representing requirements for implementation details of the /// [`ContiguousMemoryStorage`](ContiguousMemoryStorage). /// diff --git a/src/error.rs b/src/error.rs index 2a043ae..7c7f76f 100644 --- a/src/error.rs +++ b/src/error.rs @@ -27,27 +27,37 @@ pub enum LockingError { source: LockSource, }, /// Not lockable because the lock would be blocking. - WouldBlock, + WouldBlock { + /// Specifies which mutex/lock would block. 
+ source: LockSource, + }, } #[cfg(any(feature = "std", feature = "error_in_core"))] impl Display for LockingError { fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult { match self { - LockingError::Poisoned { source: which } => match which { - LockSource::BaseAddress => { - write!(f, "Cannot acquire lock: base address Mutex was poisoned") + LockingError::Poisoned { source } => write!( + f, + "Cannot acquire lock: {}", + match source { + LockSource::BaseAddress => { + "base address Mutex was poisoned" + } + LockSource::AllocationTracker => "AllocationTracker Mutex was poisoned", + LockSource::Reference => + "reference concurrent mutable access exclusion flag Mutex was poisoned", } - LockSource::AllocationTracker => write!( - f, - "Cannot acquire lock: AllocationTracker Mutex was poisoned" - ), - LockSource::Reference => write!( - f, - "Cannot acquire lock: reference concurrent mutable access exclusion flag Mutex was poisoned" - ) - }, - LockingError::WouldBlock => write!(f, "Lock would be block"), + ), + LockingError::WouldBlock { source } => write!( + f, + "Lock would block the current thread: {}", + match source { + LockSource::BaseAddress => "base address already borrowed", + LockSource::AllocationTracker => "AllocationTracker already borrowed", + LockSource::Reference => "reference already borrowed", + } + ), } } } @@ -77,19 +87,6 @@ impl From>> for Lo } } -#[cfg(feature = "std")] -impl From> for LockingError -where - LockingError: From>, -{ - fn from(value: std::sync::TryLockError) -> Self { - match value { - std::sync::TryLockError::Poisoned(poison_err) => LockingError::from(poison_err), - std::sync::TryLockError::WouldBlock => LockingError::WouldBlock, - } - } -} - /// Error returned when concurrent mutable access is attempted to the same /// memory region. 
#[derive(Debug)] @@ -98,12 +95,6 @@ pub struct RegionBorrowedError { pub range: ByteRange, } -#[deprecated( - since = "0.3.1", - note = "Renamed; use RegionBorrowedError" -)] -pub use RegionBorrowedError as RegionBorrowed; - #[cfg(any(feature = "std", feature = "error_in_core"))] impl Display for RegionBorrowedError { fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult { diff --git a/src/lib.rs b/src/lib.rs index dc28208..40bd0cf 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -46,8 +46,7 @@ use error::ContiguousMemoryError; pub mod prelude { pub use crate::{ error::*, range::ByteRange, refs::*, ContiguousMemory, ContiguousMemoryStorage, - ImplConcurrent, ImplDefault, ImplDetails, ImplUnsafe, SyncContiguousMemory, - UnsafeContiguousMemory, + ImplConcurrent, ImplDefault, ImplUnsafe, SyncContiguousMemory, UnsafeContiguousMemory, }; } @@ -110,17 +109,20 @@ impl ContiguousMemoryStorage { /// The capacity represents the size of the memory block that has been /// allocated for storing data. It may be larger than the amount of data /// currently stored within the container. + #[must_use] pub fn get_capacity(&self) -> usize { Impl::get_capacity(&self.capacity) } /// Returns the layout of the memory region containing stored data. + #[must_use] pub fn get_layout(&self) -> Layout { Impl::deref_state(&self.inner).layout() } /// Resizes the memory container to the specified `new_capacity`, optionally - /// returning the new base address of the stored items. + /// returning the new base address of the stored items - if `None` is + /// returned the base address of the memory block is the same. 
/// /// Shrinking the container is generally performed in place by freeing /// tailing memory space, but growing it can move the data in memory to find @@ -131,16 +133,13 @@ impl ContiguousMemoryStorage { /// /// # Errors /// - /// This function can return the following errors: + /// [`ContiguousMemoryError::Unshrinkable`] error is returned when + /// attempting to shrink the memory container, but previously stored data + /// prevents the container from being shrunk to the desired capacity. /// - /// - [`ContiguousMemoryError::Unshrinkable`]: Returned when attempting to - /// shrink the memory container, but the stored data prevents the - /// container from being shrunk to the desired capacity. - /// - /// - [`ContiguousMemoryError::Lock`]: Returned if the mutex holding the - /// base address or the [`AllocationTracker`] is poisoned. - /// - /// [`AllocationTracker`]: crate::tracker::AllocationTracker + /// In a concurrent implementation [`ContiguousMemoryError::Lock`] is + /// returned if the mutex holding the base address or the + /// [`AllocationTracker`](crate::tracker::AllocationTracker) is poisoned. pub fn resize( &mut self, new_capacity: usize, @@ -205,10 +204,7 @@ impl ContiguousMemoryStorage { /// returned under same conditions. /// /// Unsafe implementation never fails. - pub fn can_store( - &self, - value: &T, - ) -> Result { + pub fn can_push(&self, value: &T) -> Result { let layout = Layout::for_value(&value); Ok(Impl::peek_next(&self.inner, layout)?.is_some()) } @@ -221,8 +217,8 @@ impl ContiguousMemoryStorage { /// /// Returned value is implementation specific: /// - For concurrent implementation it is - /// `Result, LockingError>`, - /// - For default implementation it is `ContiguousMemoryRef`, + /// `Result, LockingError>`, + /// - For default implementation it is `ContiguousEntryRef`, /// - For unsafe implementation it is /// `Result<*mut u8, ContiguousMemoryError>`. 
/// @@ -244,27 +240,27 @@ impl ContiguousMemoryStorage { /// Memory block can still be grown by calling [`ContiguousMemory::resize`], /// but it can't be done automatically as that would invalidate all the /// existing pointers without any indication. - pub fn store(&mut self, value: T) -> Impl::StoreResult { + pub fn push(&mut self, value: T) -> Impl::StoreResult { let mut data = ManuallyDrop::new(value); let layout = Layout::for_value(&data); let pos = &mut *data as *mut T; - let result = unsafe { self.store_data(pos, layout) }; + let result = unsafe { self.push_raw(pos, layout) }; result } - /// Works same as [`store`](ContiguousMemory::store) but takes a pointer and + /// Works same as [`push`](ContiguousMemory::push) but takes a pointer and /// layout. /// /// Pointer type is used to deduce the destruction behavior for /// implementations that return a reference, but can be disabled by casting /// the provided pointer into `*mut ()` type and then calling /// [`core::mem::transmute`] on the returned reference. - pub unsafe fn store_data( + pub unsafe fn push_raw( &mut self, data: *mut T, layout: Layout, ) -> Impl::StoreResult { - Impl::store_data(&mut self.inner, data, layout) + Impl::push_raw(&mut self.inner, data, layout) } /// Assumes value is stored at the provided _relative_ `position` in @@ -299,20 +295,43 @@ impl ContiguousMemoryStorage { ) -> Impl::LockResult> { Impl::assume_stored(&self.inner, position) } -} -#[allow(deprecated)] -pub use details::StoreData; + /// Forgets this container without dropping it. + /// + /// Calling this method will create a memory leak because the smart pointer + /// to state will not be dropped even when all of the created references go + /// out of scope. 
As this method takes ownership of the container, calling + /// it also ensures that dereferencing pointers created by + /// [`as_ptr`](ContiguousEntryRef::as_ptr), + /// [`as_ptr_mut`](ContiguousEntryRef::as_ptr_mut), + /// [`into_ptr`](ContiguousEntryRef::into_ptr), and + /// [`into_ptr_mut`](ContiguousEntryRef::into_ptr_mut) + /// `ContiguousEntryRef` methods is guaranteed to be safe. + /// + /// This method isn't unsafe as leaking data doesn't cause undefined + /// behavior. + /// ([_see details_](https://doc.rust-lang.org/nomicon/leaking.html)) + pub fn forget(self) { + std::mem::forget(self); + } +} impl ContiguousMemoryStorage { /// Clones the allocated memory region into a new ContiguousMemoryStorage. /// /// This function isn't unsafe, even though it ignores presence of `Copy` /// bound on stored data, because it doesn't create any pointers. - pub unsafe fn copy_storage(&self) -> Self { + #[must_use] + pub fn copy_data(&self) -> Self { let current_layout = self.get_layout(); let result = Self::new_from_layout(current_layout).expect("current layout should be valid"); - core::ptr::copy_nonoverlapping(self.get_base(), result.get_base(), current_layout.size()); + unsafe { + core::ptr::copy_nonoverlapping( + self.get_base(), + result.get_base(), + current_layout.size(), + ); + } result } @@ -325,7 +344,6 @@ impl ContiguousMemoryStorage { /// This function is considered unsafe because it can mark a memory range /// as free while a valid reference is pointing to it from another place in /// code. 
- #[inline(always)] pub unsafe fn free_typed(&mut self, position: *mut T) { Self::free(self, position, size_of::()) } @@ -549,11 +567,11 @@ mod test { let value_string = "This is a test string".to_string(); let value_byte = 0x41u8; - let stored_ref_number = memory.store(value_number); - let stored_ref_car_a = memory.store(car_a.clone()); - let stored_ref_string = memory.store(value_string.clone()); - let stored_ref_byte = memory.store(value_byte); - let stored_ref_car_b = memory.store(car_b.clone()); + let stored_ref_number = memory.push(value_number); + let stored_ref_car_a = memory.push(car_a.clone()); + let stored_ref_string = memory.push(value_string.clone()); + let stored_ref_byte = memory.push(value_byte); + let stored_ref_car_b = memory.push(car_b.clone()); assert_eq!(*stored_ref_number.get(), value_number); assert_eq!(*stored_ref_car_a.get(), car_a); @@ -578,7 +596,7 @@ mod test { miles: 30123, }; - let stored_car = memory.store(car_a.clone()); + let stored_car = memory.push(car_a.clone()); assert!(memory.resize(32).is_err()); memory.resize(1024).unwrap(); diff --git a/src/range.rs b/src/range.rs index b544837..fd6e0a2 100644 --- a/src/range.rs +++ b/src/range.rs @@ -27,18 +27,24 @@ impl ByteRange { /// Aligns this byte range to the provided `alignment`. pub fn aligned(&self, alignment: usize) -> Self { - let offset = (self.0 as *const u8).align_offset(alignment); - ByteRange(self.0 + offset, self.1 + offset) + let modulo = self.0 % alignment; + if modulo == 0 { + return *self; + } + self.offset(alignment - modulo) + } + + /// Caps the end address of this byte range to the provided `position`. + pub fn cap_end(&self, position: usize) -> Self { + ByteRange(self.0, position.min(self.1)) } - /// Caps the size of this byte range to the provided `size` and returns it. - /// If the size of this byte range is lesser than the required size, `None` - /// is returned instead. 
- pub fn cap_size(&self, size: usize) -> Option { + /// Caps the size of this byte range to the provided `size`. + pub fn cap_size(&self, size: usize) -> Self { if self.len() < size { - return None; + return *self; } - Some(ByteRange(self.0, self.0 + size)) + ByteRange(self.0, self.0 + size) } /// Offsets this byte range by a provided unsigned `offset`. diff --git a/src/refs.rs b/src/refs.rs index 3896bfc..fc738c4 100644 --- a/src/refs.rs +++ b/src/refs.rs @@ -1,9 +1,9 @@ //! Contains code relating to returned reference types and their internal state. //! -//! Default implementation returns [`ContiguousMemoryRef`] when items are +//! Default implementation returns [`ContiguousEntryRef`] when items are //! stored which can be easily accessed through [`CMRef`] alias. //! -//! Concurrent implementation returns [`SyncContiguousMemoryRef`] when items are +//! Concurrent implementation returns [`SyncContiguousEntryRef`] when items are //! stored which can be easily accessed through [`SCMRef`] alias. use core::{ @@ -18,60 +18,9 @@ use crate::{ types::*, }; -/// Trait specifying interface of returned reference types. -pub trait ContiguousMemoryReference { - /// Error type returned when the data represented by the reference can't be - /// safely accessed/borrowed. - /// - /// - For concurrent implementation it's [`LockingError`]. - /// - For default implementation it's [`RegionBorrowedError`] - type BorrowError; - - /// Returns a byte range within container memory this reference points to. - fn range(&self) -> ByteRange; - - /// Returns a reference to data at its current location and panics if the - /// reference has been mutably borrowed or blocks the thread for the - /// concurrent implementation. - fn get<'a>(&'a self) -> Impl::LockResult> - where - T: RefSizeReq; - - /// Returns a reference to data at its current location and returns the - /// appropriate [error](Self::BorrowError) if that's not possible. 
- fn try_get<'a>(&'a self) -> Result, Self::BorrowError> - where - T: RefSizeReq; - - /// Returns a mutable reference to data at its current location and panics - /// if the reference has been mutably borrowed or blocks the thread for - /// concurrent implementation. - fn get_mut<'a>(&'a mut self) -> Impl::LockResult> - where - T: RefSizeReq; - - /// Returns a mutable reference to data at its current location or an error - /// [error](Self::BorrowError) if the represented memory region is already - /// mutably borrowed. - fn try_get_mut<'a>(&'a mut self) -> Result, Self::BorrowError> - where - T: RefSizeReq; - - /// Casts this reference into a dynamic type `R`. - #[cfg(feature = "ptr_metadata")] - fn into_dyn(self) -> Impl::ReferenceType - where - T: Sized + Unsize; - - /// Tries downcasting this dynamic reference into a discrete type `R`, - /// returns None if `R` drop handler doesn't match the original one. - #[cfg(feature = "ptr_metadata")] - fn downcast_dyn>(self) -> Option>; -} - /// A synchronized (thread-safe) reference to `T` data stored in a /// [`ContiguousMemoryStorage`](crate::ContiguousMemoryStorage) structure. -pub struct SyncContiguousMemoryRef { +pub struct SyncContiguousEntryRef { pub(crate) inner: Arc>, #[cfg(feature = "ptr_metadata")] pub(crate) metadata: ::Metadata, @@ -79,13 +28,12 @@ pub struct SyncContiguousMemoryRef { pub(crate) _phantom: PhantomData, } -/// A shorter type name for [`SyncContiguousMemoryRef`]. -pub type SCMRef = SyncContiguousMemoryRef; +/// A shorter type name for [`SyncContiguousEntryRef`]. +pub type SCERef = SyncContiguousEntryRef; -impl ContiguousMemoryReference for SyncContiguousMemoryRef { - type BorrowError = LockingError; - - fn range(&self) -> ByteRange { +impl SyncContiguousEntryRef { + /// Returns a byte range within container memory this reference points to. 
+ pub fn range(&self) -> ByteRange { self.inner.range } @@ -95,7 +43,7 @@ impl ContiguousMemoryReference for SyncContiguousM /// /// If the data is mutably accessed, this method will block the current /// thread until it becomes available. - fn get<'a>(&'a self) -> Result, LockingError> + pub fn get<'a>(&'a self) -> Result, LockingError> where T: RefSizeReq, { @@ -120,10 +68,10 @@ impl ContiguousMemoryReference for SyncContiguousM /// [`LockingError::Poisoned`](crate::error::LockingError::Poisoned) error /// if the Mutex holding the `base` address pointer has been poisoned. /// - /// If the data is mutably accessed, this method return a + /// If the data is mutably accessed, this method returns a /// [`LockingError::WouldBlock`](crate::error::LockingError::WouldBlock) /// error. - fn try_get<'a>(&'a self) -> Result, LockingError> + pub fn try_get<'a>(&'a self) -> Result, LockingError> where T: RefSizeReq, { @@ -151,7 +99,9 @@ impl ContiguousMemoryReference for SyncContiguousM /// [`LockingError::Poisoned`] error if the Mutex holding the base address /// pointer or the Mutex holding concurrent mutable access flag has been /// poisoned. - fn get_mut<'a>(&'a mut self) -> Result, LockingError> + pub fn get_mut<'a>( + &'a mut self, + ) -> Result, LockingError> where T: RefSizeReq, { @@ -173,6 +123,8 @@ impl ContiguousMemoryReference for SyncContiguousM /// Returns a write guard to referenced data at its current location or a /// `LockingError` if that isn't possible. /// + /// # Errors + /// /// This function can return the following errors: /// /// - [`LockingError::Poisoned`] error if the Mutex holding the base address @@ -181,7 +133,7 @@ impl ContiguousMemoryReference for SyncContiguousM /// /// - [`LockingError::WouldBlock`] error if accessing referenced data chunk /// would be blocking. 
- fn try_get_mut<'a>( + pub fn try_get_mut<'a>( &'a mut self, ) -> Result, LockingError> where @@ -192,7 +144,7 @@ impl ContiguousMemoryReference for SyncContiguousM .borrow_kind .try_write_named(LockSource::Reference)?; unsafe { - let base = ImplConcurrent::get_base(&self.inner.state.base)?; + let base = ImplConcurrent::try_get_base(&self.inner.state.base)?; let pos = base.add(self.inner.range.0); Ok(MemoryWriteGuard { state: self.inner.clone(), @@ -205,36 +157,123 @@ impl ContiguousMemoryReference for SyncContiguousM } } + /// Casts this reference into a dynamic type `R`. #[cfg(feature = "ptr_metadata")] - fn into_dyn(self) -> SyncContiguousMemoryRef + pub fn into_dyn(self) -> SyncContiguousEntryRef where T: Sized + Unsize, { unsafe { - SyncContiguousMemoryRef { + SyncContiguousEntryRef { inner: core::mem::transmute(self.inner), metadata: static_metadata::(), } } } + /// Tries downcasting this dynamic reference into a discrete type `R`, + /// returns None if `R` drop handler doesn't match the original one. #[cfg(feature = "ptr_metadata")] - fn downcast_dyn>(self) -> Option> { + pub fn downcast_dyn>(self) -> Option> { if self.inner.drop_metadata != static_metadata::() { return None; } unsafe { - Some(SyncContiguousMemoryRef { + Some(SyncContiguousEntryRef { inner: core::mem::transmute(self.inner), metadata: (), }) } } + + /// Creates an immutable pointer to underlying data, blocking the current + /// thread until base address can be read. + /// + /// This function can return a [`LockingError::Poisoned`] error if the Mutex + /// holding the base address pointer has been poisoned. + /// + /// # Safety + /// + /// See: [`ContiguousEntryRef::as_ptr`] + pub unsafe fn as_ptr(&self) -> Result<*const T, LockingError> + where + T: RefSizeReq, + { + self.as_ptr_mut().map(|it| it as *const T) + } + + /// Creates a mutable pointer to underlying data, blocking the current + /// thread until base address can be read. 
+ /// + /// This function can return a [`LockingError::Poisoned`] error if the Mutex + /// holding the base address pointer has been poisoned. + /// + /// # Safety + /// + /// See: [`ContiguousEntryRef::as_ptr_mut`] + pub unsafe fn as_ptr_mut(&self) -> Result<*mut T, LockingError> + where + T: RefSizeReq, + { + let base = ImplConcurrent::get_base(&self.inner.state.base)?; + let pos = base.add(self.inner.range.0); + #[cfg(not(feature = "ptr_metadata"))] + { + Ok(pos as *mut T) + } + #[cfg(feature = "ptr_metadata")] + { + Ok(core::ptr::from_raw_parts_mut::( + pos as *mut (), + self.metadata, + )) + } + } + + /// Creates an immutable pointer to underlying data while also preventing + /// the occupied memory region from being marked as free, blocking the + /// current thread until base address can be read + /// + /// This function can return a [`LockingError::Poisoned`] error if the Mutex + /// holding the base address pointer has been poisoned. + /// + /// # Safety + /// + /// See: [`ContiguousEntryRef::into_ptr`] + pub unsafe fn into_ptr(self) -> Result<*const T, LockingError> + where + T: RefSizeReq, + { + self.into_ptr_mut().map(|it| it as *const T) + } + + /// Creates a mutable pointer to underlying data while also preventing + /// the occupied memory region from being marked as free, blocking the + /// current thread until base address can be read + /// + /// This function can return a [`LockingError::Poisoned`] error if the Mutex + /// holding the base address pointer has been poisoned. 
+ /// + /// # Safety + /// + /// See: [`ContiguousEntryRef::into_ptr_mut`] + pub unsafe fn into_ptr_mut(self) -> Result<*mut T, LockingError> + where + T: RefSizeReq, + { + let result = self.as_ptr_mut(); + let inner: *mut ReferenceState = self.inner.as_ref() + as *const ReferenceState + as *mut ReferenceState; + std::ptr::drop_in_place(&mut (*inner).state); + std::mem::forget(self.inner); + result + } } -impl Clone for SyncContiguousMemoryRef { +impl Clone for SyncContiguousEntryRef { fn clone(&self) -> Self { - SyncContiguousMemoryRef { + SyncContiguousEntryRef { inner: self.inner.clone(), #[cfg(feature = "ptr_metadata")] metadata: self.metadata.clone(), @@ -245,9 +284,9 @@ impl Clone for SyncContiguousMemoryRef { } #[cfg(feature = "debug")] -impl core::fmt::Debug for SyncContiguousMemoryRef { +impl core::fmt::Debug for SyncContiguousEntryRef { fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - f.debug_struct("SyncContiguousMemoryRef") + f.debug_struct("SyncContiguousEntryRef") .field("inner", &self.inner) .finish() } @@ -255,31 +294,36 @@ impl core::fmt::Debug for SyncContiguousMemoryRef { /// A thread-unsafe reference to `T` data stored in /// [`ContiguousMemoryStorage`](crate::ContiguousMemoryStorage) structure. -pub struct ContiguousMemoryRef { +pub struct ContiguousEntryRef { pub(crate) inner: Rc>, #[cfg(feature = "ptr_metadata")] pub(crate) metadata: ::Metadata, #[cfg(not(feature = "ptr_metadata"))] pub(crate) _phantom: PhantomData, } -/// A shorter type name for [`ContiguousMemoryRef`]. -pub type CMRef = ContiguousMemoryRef; -impl ContiguousMemoryReference for ContiguousMemoryRef { - type BorrowError = RegionBorrowedError; +/// A shorter type name for [`ContiguousEntryRef`]. +pub type CERef = ContiguousEntryRef; - fn range(&self) -> ByteRange { +impl ContiguousEntryRef { + /// Returns a byte range within container memory this reference points to. 
+ pub fn range(&self) -> ByteRange { self.inner.range } - fn get<'a>(&'a self) -> MemoryReadGuard<'a, T, ImplDefault> + /// Returns a reference to data at its current location and panics if the + /// represented memory region is mutably borrowed. + pub fn get<'a>(&'a self) -> MemoryReadGuard<'a, T, ImplDefault> where T: RefSizeReq, { - ContiguousMemoryRef::::try_get(self).expect("mutably borrowed") + ContiguousEntryRef::::try_get(self).expect("mutably borrowed") } - fn try_get<'a>(&'a self) -> Result, RegionBorrowedError> + /// Returns a reference to data at its current location or a + /// [`RegionBorrowedError`] error if the represented memory region is + /// mutably borrowed. + pub fn try_get<'a>(&'a self) -> Result, RegionBorrowedError> where T: RefSizeReq, { @@ -307,16 +351,19 @@ impl ContiguousMemoryReference for ContiguousMemoryRe } } - fn get_mut<'a>(&'a mut self) -> MemoryWriteGuard<'a, T, ImplDefault> + /// Returns a mutable reference to data at its current location and panics + /// if the reference has already been borrowed. + pub fn get_mut<'a>(&'a mut self) -> MemoryWriteGuard<'a, T, ImplDefault> where T: RefSizeReq, { - ContiguousMemoryRef::::try_get_mut(self).expect("mutably borrowed") + ContiguousEntryRef::::try_get_mut(self).expect("mutably borrowed") } - /// This implementation returns a [`RegionBorrowedError`] error if the - /// represented memory region is already borrowed. - fn try_get_mut<'a>( + /// Returns a mutable reference to data at its current location or a + /// [`RegionBorrowedError`] error if the represented memory region is + /// already borrowed. + pub fn try_get_mut<'a>( &'a mut self, ) -> Result, RegionBorrowedError> where @@ -345,36 +392,117 @@ impl ContiguousMemoryReference for ContiguousMemoryRe } } + /// Casts this reference into a dynamic type `R`. 
#[cfg(feature = "ptr_metadata")] - fn into_dyn(self) -> ContiguousMemoryRef + pub fn into_dyn(self) -> ContiguousEntryRef where T: Sized + Unsize, { unsafe { - ContiguousMemoryRef { + ContiguousEntryRef { inner: core::mem::transmute(self.inner), metadata: static_metadata::(), } } } + /// Tries downcasting this dynamic reference into a discrete type `R`, + /// returns None if `R` drop handler doesn't match the original one. #[cfg(feature = "ptr_metadata")] - fn downcast_dyn>(self) -> Option> { + pub fn downcast_dyn>(self) -> Option> { if self.inner.drop_metadata != static_metadata::() { return None; } unsafe { - Some(ContiguousMemoryRef { + Some(ContiguousEntryRef { inner: core::mem::transmute(self.inner), metadata: (), }) } } + + /// Creates an immutable pointer to underlying data. + /// + /// # Safety + /// + /// This function returns a pointer that may become invalid if the + /// container's memory is resized to a capacity which requires the memory + /// segment to be moved. + /// + /// When the reference goes out of scope, its region will be marked as free + /// which means that a subsequent call to [`ContiguousMemoryStorage::push`] + /// or friends can cause undefined behavior when dereferencing the pointer. + pub unsafe fn as_ptr(&self) -> *const T + where + T: RefSizeReq, + { + self.as_ptr_mut() as *const T + } + + /// Creates a mutable pointer to underlying data. + /// + /// # Safety + /// + /// In addition to concerns noted in [`ContiguousEntryRef::as_ptr`], + /// this function also provides mutable access to the underlying data + /// allowing potential data races. 
+ pub unsafe fn as_ptr_mut(&self) -> *mut T + where + T: RefSizeReq, + { + let base = ImplDefault::get_base(&self.inner.state.base); + let pos = base.add(self.inner.range.0); + + #[cfg(not(feature = "ptr_metadata"))] + { + pos as *mut T + } + #[cfg(feature = "ptr_metadata")] + { + core::ptr::from_raw_parts_mut::(pos as *mut (), self.metadata) + } + } + + /// Creates an immutable pointer to underlying data while also preventing + /// the occupied memory region from being marked as free. + /// + /// # Safety + /// + /// This function returns a pointer that may become invalid if the + /// container's memory is resized to a capacity which requires the memory + /// segment to be moved. + pub unsafe fn into_ptr(self) -> *const T + where + T: RefSizeReq, + { + self.into_ptr_mut() as *const T + } + + /// Creates a mutable pointer to underlying data while also preventing + /// the occupied memory region from being marked as free. + /// + /// # Safety + /// + /// In addition to concerns noted in + /// [`ContiguousEntryRef::into_ptr`], this function also provides + /// mutable access to the underlying data allowing potential data races. 
+ pub unsafe fn into_ptr_mut(self) -> *mut T + where + T: RefSizeReq, + { + let result = self.as_ptr_mut(); + let inner: *mut ReferenceState = self.inner.as_ref() + as *const ReferenceState + as *mut ReferenceState; + std::ptr::drop_in_place(&mut (*inner).state); + std::mem::forget(self.inner); + result + } } -impl Clone for ContiguousMemoryRef { +impl Clone for ContiguousEntryRef { fn clone(&self) -> Self { - ContiguousMemoryRef { + ContiguousEntryRef { inner: self.inner.clone(), #[cfg(feature = "ptr_metadata")] metadata: self.metadata.clone(), @@ -385,9 +513,9 @@ impl Clone for ContiguousMemoryRef { } #[cfg(feature = "debug")] -impl core::fmt::Debug for ContiguousMemoryRef { +impl core::fmt::Debug for ContiguousEntryRef { fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - f.debug_struct("ContiguousMemoryRef") + f.debug_struct("ContiguousEntryRef") .field("inner", &self.inner) .finish() } @@ -396,7 +524,7 @@ impl core::fmt::Debug for ContiguousMemoryRef { pub(crate) mod sealed { use super::*; - /// Internal state of [`ContiguousMemoryRef`] and [`SyncContiguousMemoryRef`]. + /// Internal state of [`ContiguousEntryRef`] and [`SyncContiguousEntryRef`]. pub struct ReferenceState { pub state: Impl::StorageState, pub range: ByteRange, diff --git a/src/tracker.rs b/src/tracker.rs index cac31c8..617dbcf 100644 --- a/src/tracker.rs +++ b/src/tracker.rs @@ -116,7 +116,7 @@ impl AllocationTracker { it.len() >= layout.size() && it.aligned(layout.align()).len() >= layout.size() })?; - let usable = available.aligned(layout.align()).cap_size(layout.size())?; + let usable = available.aligned(layout.align()).cap_size(layout.size()); Some(usable) } @@ -162,7 +162,11 @@ impl AllocationTracker { /// On success, it returns a [`ByteRange`] of the memory region that was /// taken, or a [`ContiguousMemoryError::NoStorageLeft`] error if the /// requested `layout` cannot be placed within any free regions. 
- pub fn take_next(&mut self, layout: Layout) -> Result { + pub fn take_next( + &mut self, + base_address: usize, + layout: Layout, + ) -> Result { if layout.size() > self.size { return Err(ContiguousMemoryError::NoStorageLeft); } @@ -172,14 +176,20 @@ impl AllocationTracker { .iter() .enumerate() .find(|(_, it)| { - it.len() >= layout.size() && it.aligned(layout.align()).len() >= layout.size() + if it.len() < layout.size() { + return false; + } + + let aligned = it + .offset(base_address) + .aligned(layout.align()) + .cap_end(base_address + self.len()); + + aligned.len() >= layout.size() }) .ok_or(ContiguousMemoryError::NoStorageLeft)?; - let taken = available - .aligned(layout.align()) - .cap_size(layout.size()) - .ok_or(ContiguousMemoryError::NoStorageLeft)?; + let taken = available.aligned(layout.align()).cap_size(layout.size()); let (left, right) = available.difference_unchecked(taken); @@ -264,7 +274,7 @@ mod tests { let mut tracker = AllocationTracker::new(1024); let range = tracker - .take_next(Layout::from_size_align(32, 8).unwrap()) + .take_next(0, Layout::from_size_align(32, 8).unwrap()) .unwrap(); assert_eq!(range, ByteRange(0, 32)); @@ -288,7 +298,7 @@ mod tests { let mut tracker = AllocationTracker::new(1024); let layout = Layout::from_size_align(128, 8).unwrap(); - let range = tracker.take_next(layout).unwrap(); + let range = tracker.take_next(0, layout).unwrap(); assert_eq!(range, ByteRange(0, 128)); } } diff --git a/src/types.rs b/src/types.rs index 4fc9821..b82adc4 100644 --- a/src/types.rs +++ b/src/types.rs @@ -59,7 +59,7 @@ impl MutexTypesafe for Mutex { match self.try_lock() { Ok(it) => Ok(it), Err(std::sync::TryLockError::Poisoned(_)) => Err(LockingError::Poisoned { source }), - Err(std::sync::TryLockError::WouldBlock) => Err(LockingError::WouldBlock), + Err(std::sync::TryLockError::WouldBlock) => Err(LockingError::WouldBlock { source }), } } } @@ -97,7 +97,7 @@ impl RwLockTypesafe for RwLock { fn try_read_named(&self, source: LockSource) -> 
Result, LockingError> { match self.try_read() { Ok(guard) => Ok(guard), - Err(std::sync::TryLockError::WouldBlock) => Err(LockingError::WouldBlock), + Err(std::sync::TryLockError::WouldBlock) => Err(LockingError::WouldBlock { source }), Err(std::sync::TryLockError::Poisoned(_)) => Err(LockingError::Poisoned { source }), } } @@ -112,7 +112,7 @@ impl RwLockTypesafe for RwLock { fn try_write_named(&self, source: LockSource) -> Result, LockingError> { match self.try_write() { Ok(guard) => Ok(guard), - Err(std::sync::TryLockError::WouldBlock) => Err(LockingError::WouldBlock), + Err(std::sync::TryLockError::WouldBlock) => Err(LockingError::WouldBlock { source }), Err(std::sync::TryLockError::Poisoned(_)) => Err(LockingError::Poisoned { source }), } } @@ -198,6 +198,8 @@ mod pointer { /// /// This is a workaround for invoking [`Drop::drop`] as well as calling /// compiler generated drop glue dynamically. + /// + /// Note that `do_drop` doesn't deallocate the memory. pub trait HandleDrop { fn do_drop(&mut self); }