#pragma once

+ #include <array>
#include <snmalloc/snmalloc.h>
+ #include <test/xoroshiro.h>
+
+ #if __has_feature(address_sanitizer)
+ #  include <sanitizer/asan_interface.h>
+ // Asan does not appear to support a __asan_update_deallocation_context
+ #  define VERONA_TRACK_FREE(ptr, size) __asan_poison_memory_region(ptr, size);
+ #  define VERONA_TRACK_ALLOC(ptr, size) __asan_unpoison_memory_region(ptr, size); __asan_update_allocation_context(ptr);
+ #  define VERONA_NO_SANITIZE_ADDRESS __attribute__((no_sanitize_address))
+ #else
+ #  define VERONA_TRACK_FREE(ptr, size)
+ #  define VERONA_TRACK_ALLOC(ptr, size)
+ #  define VERONA_NO_SANITIZE_ADDRESS
+ #endif

namespace verona::rt::heap
{
+ #ifdef USE_REPLAY_ALLOCATOR
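+   // Wraps snmalloc so that frees are deferred onto per-sizeclass free
+   // lists and a seeded PRNG decides when a freed object is reused,
+   // allowing the reuse pattern to be replayed from a given seed.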
+   class ReplayAllocator
+   {
+     struct Node
+     {
+       Node* next;
+     };
+
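+     // Per-sizeclass free lists of deferred frees, their current lengths,
+     // a flag lock protecting them, and the PRNG driving reuse decisions.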
+     static inline std::array<Node*, 256> allocs;
+     static inline std::array<size_t, 256> lengths;
+     static inline snmalloc::FlagWord lock;
+     static inline xoroshiro::p128r32 rng;
+
+   public:
+     static void set_seed(uint64_t seed)
+     {
+       rng.set_state(seed);
+     }
+
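+     // Allocate `size` bytes, sometimes reusing a previously freed object
+     // of the same sizeclass (chosen by the PRNG), otherwise falling
+     // through to snmalloc.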
+     VERONA_NO_SANITIZE_ADDRESS
+     static void* alloc(size_t size)
+     {
+       auto sc = snmalloc::size_to_sizeclass_full(size);
+       auto idx = sc.index();
+       {
+         snmalloc::FlagLock l(lock);
+         if (lengths[idx] > 0)
+         {
+           // Reuse an element if there are at least 16, or
+           // the randomisation says to.
+           auto r = rng.next() & 0xf;
+           auto reuse = lengths[idx] > 16 || (r == 0);
+           if (reuse)
+           {
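+             // Pick a random position in this sizeclass's free list and
+             // unlink that node.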
+             auto r = rng.next();
+             Node** prev = &allocs[idx];
+             for (size_t i = 0; i < r % lengths[idx]; i++)
+             {
+               prev = &(*prev)->next;
+             }
+
+             auto curr = *prev;
+             auto next = curr->next;
+             *prev = next;
+             lengths[idx]--;
+             VERONA_TRACK_ALLOC(curr, size);
+             return curr;
+           }
+         }
+       }
+
+       return snmalloc::ThreadAlloc::get().alloc(size);
+     }
+
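+     // Defer the free: push the object onto the free list for its
+     // sizeclass rather than returning it to snmalloc immediately.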
+     static void dealloc(void* ptr, size_t size)
+     {
+       auto sc = snmalloc::size_to_sizeclass_full(size);
+       auto idx = sc.index();
+       snmalloc::FlagLock l(lock);
+       auto hd = reinterpret_cast<Node*>(ptr);
+       hd->next = allocs[idx];
+       allocs[idx] = hd;
+       lengths[idx]++;
+       VERONA_TRACK_FREE(ptr, size);
+     }
+
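+     // Hand every deferred free back to snmalloc so that the leak check in
+     // debug_check_empty sees an empty heap.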
+     VERONA_NO_SANITIZE_ADDRESS
+     static void flush()
+     {
+       for (size_t i = 0; i < allocs.size(); i++)
+       {
+         auto hd = allocs[i];
+         size_t count = 0;
+         while (hd != nullptr)
+         {
+           auto next = hd->next;
+           snmalloc::ThreadAlloc::get().dealloc(
+             hd, snmalloc::sizeclass_to_size(i));
+           hd = next;
+           count++;
+         }
+         assert(count == lengths[i]);
+         lengths[i] = 0;
+         allocs[i] = nullptr;
+       }
+     }
+   };
+
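+   // Free-function interface of verona::rt::heap; in this branch every
+   // call routes through the ReplayAllocator.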
+   inline void* alloc(size_t size)
+   {
+     return ReplayAllocator::alloc(size);
+   }
+
+   template<size_t size>
+   inline void* alloc()
+   {
+     return ReplayAllocator::alloc(size);
+   }
+
+   inline void* calloc(size_t size)
+   {
+     auto obj = ReplayAllocator::alloc(size);
+     memset(obj, 0, size);
+     return obj;
+   }
+
+   template<size_t size>
+   inline void* calloc()
+   {
+     auto obj = ReplayAllocator::alloc(size);
+     memset(obj, 0, size);
+     return obj;
+   }
+
+   inline void dealloc(void* ptr, size_t size)
+   {
+     ReplayAllocator::dealloc(ptr, size);
+   }
+
+   inline void dealloc(void* ptr)
+   {
+     auto size = snmalloc::ThreadAlloc::get().alloc_size(ptr);
+     dealloc(ptr, size);
+   }
+
+   template<size_t size>
+   inline void dealloc(void* ptr)
+   {
+     ReplayAllocator::dealloc(ptr, size);
+   }
+
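+   // Flush deferred frees back to snmalloc before asking it to verify that
+   // everything has been deallocated.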
+   inline void debug_check_empty()
+   {
+     ReplayAllocator::flush();
+     snmalloc::debug_check_empty<snmalloc::Alloc::Config>();
+   }
+
+   inline void set_seed(uint64_t seed)
+   {
+     ReplayAllocator::set_seed(seed);
+   }
+ #else
  inline void* alloc(size_t size)
  {
    return snmalloc::ThreadAlloc::get().alloc(size);
@@ -26,7 +183,6 @@ namespace verona::rt::heap
    return snmalloc::ThreadAlloc::get().alloc<size, snmalloc::YesZero>();
  }

-
  inline void dealloc(void* ptr)
  {
    return snmalloc::ThreadAlloc::get().dealloc(ptr);
@@ -37,7 +193,8 @@ namespace verona::rt::heap
    return snmalloc::ThreadAlloc::get().dealloc(ptr, size);
  }

-   template<size_t size> inline void dealloc(void* ptr)
+   template<size_t size>
+   inline void dealloc(void* ptr)
  {
    return snmalloc::ThreadAlloc::get().dealloc<size>(ptr);
  }
@@ -46,4 +203,10 @@ namespace verona::rt::heap
  {
    snmalloc::debug_check_empty<snmalloc::Alloc::Config>();
  }
+
+   inline void set_seed(uint64_t seed)
+   {
+     // Do nothing
+   }
+ #endif
} // namespace verona::rt::heap