alloc.cpp
#include <stdlib.h> // malloc()
#include <stdint.h> // uint8_t
#include <string.h> // memset()
#include "alloc.h" // MEM
#include "common.h"
/********************************** ALLOC *****************************************
                Memory allocation and de-allocation utilities.
***********************************************************************************
- Each memory arena is a list of contiguous blocks. The arena's pointer points
  to the block currently in use (which is the last one in the list).
- Freeing an arena puts all of its blocks on a list of free blocks.
*/
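// A minimal usage sketch (the MEM::PARSE arena name is taken from the test code
// at the bottom of this file):
//
//     void *p = allocate(64, MEM::PARSE);        // bump-allocate 64 bytes
//     void *q = allocate_zero(32, MEM::PARSE);   // same, but zero-initialized
//     deallocate(MEM::PARSE);                    // recycle all of the arena's blocks at once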
static constexpr size_t kilobytes(size_t n) {
    return n * 1024;
}
// Round `x` up to the next multiple of `n`; `n` must be a power of two.
static constexpr size_t roundup(size_t x, size_t n) {
    return (x + n - 1) & ~(n - 1);
}
constexpr int NARENAS = (int)MEM::__LEN;
constexpr size_t ALIGN_SIZE = sizeof(void*);
constexpr size_t BLOCK_TAIL = kilobytes(10);
// NOTE(stefanos): The payload that follows a block header is naturally aligned
// because block_t contains only pointers, so its size is a multiple of the pointer
// size (ALIGN_SIZE); see the static_assert after the struct. This holds on 32-bit
// systems as well, since e.g. a 64-bit value is emulated there and does not
// require stricter alignment.
struct block_t {
    block_t *next;    // Next block in this arena's chain (or in the free list).
    uint8_t *avail;   // First free byte inside this block.
    uint8_t *limit;   // One past the last byte of this block.
};
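// A compile-time check of the NOTE above; it is not load-bearing, it just
// documents the alignment assumption the allocator relies on.
static_assert(sizeof(block_t) % sizeof(void*) == 0,
              "the payload right after a block_t header must stay pointer-aligned");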
// TODO: Add a check that the number of initializers below matches NARENAS
// (see the static_assert sketch after the arrays).
// Dummy head blocks, one per arena. They are always empty and are used only
// to denote empty arenas.
block_t first[NARENAS] = { { NULL }, { NULL }, { NULL }, { NULL }, { NULL }, { NULL }, { NULL } };
// The actual arenas
block_t *arena[NARENAS] = { &first[0], &first[1], &first[2], &first[3], &first[4], &first[5], &first[6] };
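// One minimal way to address the TODO above, assuming the seven brace initializers
// reflect the current members of MEM:
static_assert(NARENAS == 7,
              "the first[]/arena[] initializers above assume 7 arenas; update them if MEM changes");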
// List of free blocks. When an arena is deallocated, all of its blocks are
// prepended to this list so later allocations can reuse them.
block_t *freeblocks;
/*
allocate: Return a pointer to free contiguous memory of at least the requested
size (e.g. `sizeof(T)` for the typed version).
-------------------------------------------------------------------------
Internally this means doing one of three things:
1) If there is enough space left on the current block in use (the last
   block put in the arena), return a pointer into it and update `avail`.
2) Otherwise, search the `freeblocks` list for a big enough block.
3) If none is found, allocate a new block and append it to the tail
   of the arena. Update the arena's pointer to point to this block.
*/
/// Raw size version
void *allocate(size_t n, MEM enum_ar) {
    int ar = (int) enum_ar;
    block_t *ap = arena[ar];
    assert(ap);
    n = roundup(n, ALIGN_SIZE);
    if (n > (size_t)(ap->limit - ap->avail)) {
        // Case 2: walk the `freeblocks` list for a big enough block. Every block
        // we take off the list is chained onto the arena, so it is returned to
        // `freeblocks` when the arena is deallocated.
        while (freeblocks != NULL && n > (size_t)(ap->limit - ap->avail)) {
            ap->next = freeblocks;
            freeblocks = freeblocks->next;
            ap = ap->next;
        }
        if (n > (size_t)(ap->limit - ap->avail)) {
            // Case 3: no free block fits; allocate a new one.
            size_t sz = sizeof(block_t) + n + roundup(BLOCK_TAIL, ALIGN_SIZE);
            // Set `next` to the new block first so that if this is the first block
            // in the arena `ar`, then first[ar].next will be set to this new block.
            ap->next = (block_t *)malloc(sz);
            ap = ap->next;
            assert(ap && "Insufficient memory");
            ap->limit = (uint8_t*)ap + sz;
        }
        ap->avail = (uint8_t*)(ap + 1);
        ap->next = NULL;
        arena[ar] = ap;
    }
    // Carve `n` bytes off the (now current) block and return a pointer to them.
    ap->avail += n;
    void *ret = (void*)(ap->avail - n);
    assert(ret);
    return ret;
}
void *allocate_zero(size_t n, MEM ar) {
    void *mem = allocate(n, ar);
    assert(mem);
    // Zero it
    memset(mem, 0, n);
    return mem;
}
/*
deallocate: Deallocate a whole arena.
-------------------------------------------------------------------------
Internally this means taking the arena's list of blocks (which starts at
first[ar].next) and putting it on the freeblocks list.
*/
void deallocate(MEM enum_ar) {
    int ar = (int) enum_ar;
    // Check for double deallocation: an empty arena still points to its head block.
    if (arena[ar] == &first[ar]) return;
    // Connect the arena's last block with the start of the free blocks list.
    arena[ar]->next = freeblocks;
    // Now point the start of the free blocks list to the first block of the arena
    // (which is the start of the chain connecting the arena's blocks and the free blocks).
    freeblocks = first[ar].next;
    // Set the arena's head block to point to nothing again.
    first[ar].next = NULL;
    // Point the arena back to its empty head block.
    arena[ar] = &first[ar];
}
/* Testing */
/*
static bool pointer_in_range(void *p, void *start, void *end) {
    return (p >= start && p <= end);
}
void test_alloc() {
    void *a = allocate(123, MEM::PARSE);
    void *b = allocate(10, MEM::PARSE);
    // Verify that `a` and `b` ended up on the same block and that alignment works:
    // if both are correct, `b` should start at the start of `a` plus 123 rounded
    // up to ALIGN_SIZE.
    uint8_t *temp = (uint8_t*)a + roundup(123, ALIGN_SIZE);
    assert(temp == b);
    block_t *ap = (block_t*) ((uint8_t*)a - sizeof(block_t));
    assert(((ap->avail - roundup(10, ALIGN_SIZE)) - (uint8_t*)a) == roundup(123, ALIGN_SIZE));
    // This should make it allocate a different block.
    void *c = allocate(BLOCK_TAIL, MEM::PARSE);
    assert(!pointer_in_range(c, ap, (uint8_t*)ap +
        sizeof(block_t) + roundup(123, ALIGN_SIZE) + BLOCK_TAIL));
    // Deallocate the arena.
    deallocate(MEM::PARSE);
    // Verify that the freeblocks list contains exactly 2 blocks.
    assert(freeblocks != NULL);
    int count = 0;
    block_t *runner = freeblocks;
    while (runner != NULL) {
        ++count;
        runner = runner->next;
    }
    assert(count == 2);
    // Verify that we can allocate again from the arena we just deallocated
    // (this should reuse the first free block).
    void *d = allocate(40, MEM::PARSE);
    assert(d != NULL);
    // Use a new arena but verify that we will take the second block
    // of `freeblocks`. This is the one we got with `c`.
    void *e = allocate(BLOCK_TAIL, MEM::TYPECHECK);
    assert(e == c);
}
*/