@@ -8,9 +8,20 @@ use rlsf::Tlsf;
88
/// Concrete TLSF configuration: `usize` first/second-level bitmaps with
/// `usize::BITS` first- and second-level lists (the widest layout the
/// bitmap type can index on this target).
type TlsfHeap = Tlsf<'static, usize, usize, { usize::BITS as usize }, { usize::BITS as usize }>;
1010
/// Allocator state shared behind the `Mutex` in [`Heap`].
struct Inner {
    // The TLSF allocator itself.
    tlsf: TlsfHeap,
    // Set once by `init`; guards against double initialization.
    initialized: bool,
    // The memory region registered via `insert_free_block_ptr`, kept so the
    // pool's block list can be iterated later (used for free-space accounting).
    raw_block: Option<NonNull<[u8]>>,
    // Byte size of `raw_block` as passed to `init`.
    raw_block_size: usize,
}

// Safety: `Inner` is auto-`!Send`/`!Sync` only because of the `NonNull` field.
// All access goes through the critical-section [Mutex] wrapping it in [`Heap`],
// so the pointer is never touched concurrently.
unsafe impl Sync for Inner {}
unsafe impl Send for Inner {}
21+
/// A two-Level segregated fit heap.
pub struct Heap {
    // Entire allocator state behind a critical-section mutex + RefCell, so
    // methods taking `&self` can still mutate the TLSF pool safely.
    heap: Mutex<RefCell<Inner>>,
}
1526
1627impl Heap {
@@ -20,7 +31,12 @@ impl Heap {
2031 /// [`init`](Self::init) method before using the allocator.
2132 pub const fn empty ( ) -> Heap {
2233 Heap {
23- heap : Mutex :: new ( RefCell :: new ( ( ConstDefault :: DEFAULT , false ) ) ) ,
34+ heap : Mutex :: new ( RefCell :: new ( Inner {
35+ tlsf : ConstDefault :: DEFAULT ,
36+ initialized : false ,
37+ raw_block : None ,
38+ raw_block_size : 0 ,
39+ } ) ) ,
2440 }
2541 }
2642
@@ -59,25 +75,52 @@ impl Heap {
        // A zero-sized region cannot back an allocator.
        assert!(size > 0);
        critical_section::with(|cs| {
            let mut heap = self.heap.borrow_ref_mut(cs);
            // Registering the region twice would corrupt the pool.
            assert!(!heap.initialized);
            heap.initialized = true;
            // NOTE(review): `new_unchecked` assumes `start_addr` is non-zero —
            // confirm the (unseen) signature's safety contract requires this.
            let block: NonNull<[u8]> =
                NonNull::slice_from_raw_parts(NonNull::new_unchecked(start_addr as *mut u8), size);
            heap.tlsf.insert_free_block_ptr(block);
            // Remember the region so `used`/`free` can walk its block list.
            heap.raw_block = Some(block);
            heap.raw_block_size = size;
        });
    }
6887
6988 fn alloc ( & self , layout : Layout ) -> Option < NonNull < u8 > > {
70- critical_section:: with ( |cs| self . heap . borrow_ref_mut ( cs) . 0 . allocate ( layout) )
89+ critical_section:: with ( |cs| self . heap . borrow_ref_mut ( cs) . tlsf . allocate ( layout) )
7190 }
7291
7392 unsafe fn dealloc ( & self , ptr : * mut u8 , layout : Layout ) {
7493 critical_section:: with ( |cs| {
7594 self . heap
7695 . borrow_ref_mut ( cs)
77- . 0
96+ . tlsf
7897 . deallocate ( NonNull :: new_unchecked ( ptr) , layout. align ( ) )
7998 } )
8099 }
100+
101+ /// Get the amount of bytes used by the allocator.
102+ pub fn used ( & self ) -> usize {
103+ critical_section:: with ( |cs| {
104+ self . heap . borrow_ref_mut ( cs) . raw_block_size - self . free_with_cs ( cs)
105+ } )
106+ }
107+
108+ /// Get the amount of free bytes in the allocator.
109+ pub fn free ( & self ) -> usize {
110+ critical_section:: with ( |cs| self . free_with_cs ( cs) )
111+ }
112+
113+ fn free_with_cs ( & self , cs : critical_section:: CriticalSection ) -> usize {
114+ let inner_mut = self . heap . borrow_ref_mut ( cs) ;
115+ unsafe {
116+ inner_mut
117+ . tlsf
118+ . iter_blocks ( inner_mut. raw_block . unwrap ( ) )
119+ . filter ( |block_info| !block_info. is_occupied ( ) )
120+ . map ( |block_info| block_info. max_payload_size ( ) )
121+ . sum :: < usize > ( )
122+ }
123+ }
81124}
82125
83126unsafe impl GlobalAlloc for Heap {
0 commit comments