L4Re Operating System Framework
Interface and Usage Documentation
list_alloc
1// vim:set ft=cpp: -*- Mode: C++ -*-
2/*
3 * (c) 2008-2009 Alexander Warg <warg@os.inf.tu-dresden.de>,
4 * Torsten Frenzel <frenzel@os.inf.tu-dresden.de>
5 * economic rights: Technische Universität Dresden (Germany)
6 *
7 * License: see LICENSE.spdx (in this directory or the directories above)
8 */
9
10#pragma once
11
12#include <l4/cxx/arith>
13#include <l4/cxx/minmax>
14#include <l4/cxx/type_traits>
15#include <l4/sys/consts.h>
16
17namespace cxx {
18
22class List_alloc
23{
24private:
25 friend class List_alloc_sanity_guard;
26
27 struct Mem_block
28 {
29 Mem_block *next;
30 unsigned long size;
31 };
32
33 Mem_block *_first;
34
35 inline void check_overlap(void *, unsigned long );
36 inline void sanity_check_list(char const *, char const *);
37 inline void merge();
38
39public:
40
47 List_alloc() : _first(0) {}
48
61 inline void free(void *block, unsigned long size, bool initial_free = false);
62
77 inline void *alloc(unsigned long size, unsigned long align,
78 unsigned long lower = 0, unsigned long upper = ~0UL);
79
100 inline void *alloc_max(unsigned long min, unsigned long *max,
101 unsigned long align, unsigned granularity,
102 unsigned long lower = 0, unsigned long upper = ~0UL);
103
109 inline unsigned long avail();
110
111 template <typename DBG>
112 void dump_free_list(DBG &out);
113};
114
115#if !defined (CXX_LIST_ALLOC_SANITY)
116class List_alloc_sanity_guard
117{
118public:
119 List_alloc_sanity_guard(List_alloc *, char const *)
120 {}
121
122};
123
124
125void
126List_alloc::check_overlap(void *, unsigned long )
127{}
128
129void
130List_alloc::sanity_check_list(char const *, char const *)
131{}
132
133#else
134
135class List_alloc_sanity_guard
136{
137private:
138 List_alloc *a;
139 char const *func;
140
141public:
142 List_alloc_sanity_guard(List_alloc *a, char const *func)
143 : a(a), func(func)
144 { a->sanity_check_list(func, "entry"); }
145
146 ~List_alloc_sanity_guard()
147 { a->sanity_check_list(func, "exit"); }
148};
149
150void
151List_alloc::check_overlap(void *b, unsigned long s)
152{
153 unsigned long const mb_align = (1UL << arith::Ld<sizeof(Mem_block)>::value) - 1;
154 if ((unsigned long)b & mb_align)
155 {
156 L4::cerr << "List_alloc(FATAL): trying to free unaligned memory: "
157 << b << " align=" << arith::Ld<sizeof(Mem_block)>::value << "\n";
158 }
159
160 Mem_block *c = _first;
161 for (;c ; c = c->next)
162 {
163 unsigned long x_s = (unsigned long)b;
164 unsigned long x_e = x_s + s;
165 unsigned long b_s = (unsigned long)c;
166 unsigned long b_e = b_s + c->size;
167
168 if ((x_s >= b_s && x_s < b_e)
169 || (x_e > b_s && x_e <= b_e)
170 || (b_s >= x_s && b_s < x_e)
171 || (b_e > x_s && b_e <= x_e))
172 {
173 L4::cerr << "List_alloc(FATAL): trying to free memory that "
174 "is already free: \n ["
175 << (void*)x_s << '-' << (void*)x_e << ") overlaps ["
176 << (void*)b_s << '-' << (void*)b_e << ")\n";
177 }
178 }
179}
180
181void
182List_alloc::sanity_check_list(char const *func, char const *info)
183{
184 Mem_block *c = _first;
185 for (;c ; c = c->next)
186 {
187 if (c->next)
188 {
189 if (c >= c->next)
190 {
191 L4::cerr << "List_alloc(FATAL): " << func << '(' << info
192 << "): list order violation\n";
193 }
194
195 if (((unsigned long)c) + c->size > (unsigned long)c->next)
196 {
197 L4::cerr << "List_alloc(FATAL): " << func << '(' << info
198 << "): list order violation\n";
199 }
200 }
201 }
202}
203
204#endif
205
206void
207List_alloc::merge()
208{
209 List_alloc_sanity_guard __attribute__((unused)) guard(this, __func__);
210 Mem_block *c = _first;
211 while (c && c->next)
212 {
213 unsigned long f_start = reinterpret_cast<unsigned long>(c);
214 unsigned long f_end = f_start + c->size;
215 unsigned long n_start = reinterpret_cast<unsigned long>(c->next);
216
217 if (f_end == n_start)
218 {
219 c->size += c->next->size;
220 c->next = c->next->next;
221 continue;
222 }
223
224 c = c->next;
225 }
226}
227
228void
229List_alloc::free(void *block, unsigned long size, bool initial_free)
230{
231 List_alloc_sanity_guard __attribute__((unused)) guard(this, __func__);
232
233 unsigned long const mb_align = (1UL << arith::Ld<sizeof(Mem_block)>::value) - 1;
234
235 if (initial_free)
236 {
237 // enforce alignment constraint on initial memory
238 unsigned long nblock = (reinterpret_cast<unsigned long>(block) + mb_align)
239 & ~mb_align;
240 size = (size - (nblock - reinterpret_cast<unsigned long>(block)))
241 & ~mb_align;
242 block = reinterpret_cast<void*>(nblock);
243 }
244 else
245 // blow up size to the minimum aligned size
246 size = (size + mb_align) & ~mb_align;
247
248 check_overlap(block, size);
249
250 Mem_block **c = &_first;
251 Mem_block *next = 0;
252
253 if (*c)
254 {
255 while (*c && *c < block)
256 c = &(*c)->next;
257
258 next = *c;
259 }
260
261 *c = reinterpret_cast<Mem_block*>(block);
262
263 (*c)->next = next;
264 (*c)->size = size;
265
266 merge();
267}
268
269void *
270List_alloc::alloc_max(unsigned long min, unsigned long *max, unsigned long align,
271 unsigned granularity, unsigned long lower,
272 unsigned long upper)
273{
274 List_alloc_sanity_guard __attribute__((unused)) guard(this, __func__);
275
276 unsigned char const mb_bits = arith::Ld<sizeof(Mem_block)>::value;
277 unsigned long const mb_align = (1UL << mb_bits) - 1;
278
279 // blow minimum up to at least the minimum aligned size of a Mem_block
280 min = l4_round_size(min, mb_bits);
281 // truncate maximum to at least the size of a Mem_block
282 *max = l4_trunc_size(*max, mb_bits);
283 // truncate maximum size according to granularity
284 *max = *max & ~(granularity - 1UL);
285
286 if (min > *max)
287 return 0;
288
289 unsigned long almask = align ? (align - 1UL) : 0;
290
291 // minimum alignment is given by the size of a Mem_block
292 if (almask < mb_align)
293 almask = mb_align;
294
295 Mem_block **c = &_first;
296 Mem_block **fit = 0;
297 unsigned long max_fit = 0;
298 unsigned long a_lower = (lower + almask) & ~almask;
299
300 for (; *c; c = &(*c)->next)
301 {
302 // address of free memory block
303 unsigned long n_start = reinterpret_cast<unsigned long>(*c);
304
305 // block too small, next
306 // XXX: maybe we can skip this and just do the test below
307 if ((*c)->size < min)
308 continue;
309
310 // block outside region, next
311 if (upper < n_start || a_lower > n_start + (*c)->size)
312 continue;
313
314 // aligned start address within the free block
315 unsigned long a_start = (n_start + almask) & ~almask;
316
317 // check if aligned start address is behind the block, next
318 if (a_start - n_start >= (*c)->size)
319 continue;
320
321 a_start = a_start < a_lower ? a_lower : a_start;
322
323 // end address would overflow, next
324 if (min > ~0UL - a_start)
325 continue;
326
327 // block outside region, next
328 if (a_start + min - 1UL > upper)
329 continue;
330
331 // remaining size after subtracting the padding for the alignment
332 unsigned long r_size = (*c)->size - a_start + n_start;
333
334 // upper limit can limit maximum size
335 if (a_start + r_size - 1UL > upper)
336 r_size = upper - a_start + 1UL;
337
338 // round down according to granularity
339 r_size &= ~(granularity - 1UL);
340
341 // block too small
342 if (r_size < min)
343 continue;
344
345 if (r_size >= *max)
346 {
347 fit = c;
348 max_fit = *max;
349 break;
350 }
351
352 if (r_size > max_fit)
353 {
354 max_fit = r_size;
355 fit = c;
356 }
357 }
358
359 if (fit)
360 {
361 unsigned long n_start = reinterpret_cast<unsigned long>(*fit);
362 unsigned long a_lower = (lower + almask) & ~almask;
363 unsigned long a_start = (n_start + almask) & ~almask;
364 a_start = a_start < a_lower ? a_lower : a_start;
365 unsigned long r_size = (*fit)->size - a_start + n_start;
366
367 if (a_start > n_start)
368 {
369 (*fit)->size -= r_size;
370 fit = &(*fit)->next;
371 }
372 else
373 *fit = (*fit)->next;
374
375 *max = max_fit;
376 if (r_size == max_fit)
377 return reinterpret_cast<void *>(a_start);
378
379 Mem_block *m = reinterpret_cast<Mem_block*>(a_start + max_fit);
380 m->next = *fit;
381 m->size = r_size - max_fit;
382 *fit = m;
383 return reinterpret_cast<void *>(a_start);
384 }
385
386 return 0;
387}
388
389void *
390List_alloc::alloc(unsigned long size, unsigned long align, unsigned long lower,
391 unsigned long upper)
392{
393 List_alloc_sanity_guard __attribute__((unused)) guard(this, __func__);
394
395 unsigned long const mb_align
396 = (1UL << arith::Ld<sizeof(Mem_block)>::value) - 1;
397
398 // blow up size to the minimum aligned size
399 size = (size + mb_align) & ~mb_align;
400
401 unsigned long almask = align ? (align - 1UL) : 0;
402
403 // minimum alignment is given by the size of a Mem_block
404 if (almask < mb_align)
405 almask = mb_align;
406
407 Mem_block **c = &_first;
408 unsigned long a_lower = (lower + almask) & ~almask;
409
410 for (; *c; c=&(*c)->next)
411 {
412 // address of free memory block
413 unsigned long n_start = reinterpret_cast<unsigned long>(*c);
414
415 // block too small, next
416 // XXX: maybe we can skip this and just do the test below
417 if ((*c)->size < size)
418 continue;
419
420 // block outside region, next
421 if (upper < n_start || a_lower > n_start + (*c)->size)
422 continue;
423
424 // aligned start address within the free block
425 unsigned long a_start = (n_start + almask) & ~almask;
426
427 // block too small after alignment, next
428 if (a_start - n_start >= (*c)->size)
429 continue;
430
431 a_start = a_start < a_lower ? a_lower : a_start;
432
433 // end address would overflow, next
434 if (size > ~0UL - a_start)
435 continue;
436
437 // block outside region, next
438 if (a_start + size - 1UL > upper)
439 continue;
440
441 // remaining size after subtracting the padding
442 // for the alignment
443 unsigned long r_size = (*c)->size - a_start + n_start;
444
445 // block too small
446 if (r_size < size)
447 continue;
448
449 if (a_start > n_start)
450 {
451 // have free space before the allocated block
452 // shrink the block and set c to the next pointer of that
453 // block
454 (*c)->size -= r_size;
455 c = &(*c)->next;
456 }
457 else
458 // drop the block, c remains the next pointer of the
459 // previous block
460 *c = (*c)->next;
461
462 // allocated the whole remaining space
463 if (r_size == size)
464 return reinterpret_cast<void*>(a_start);
465
466 // add a new free block behind the allocated block
467 Mem_block *m = reinterpret_cast<Mem_block*>(a_start + size);
468 m->next = *c;
469 m->size = r_size - size;
470 *c = m;
471 return reinterpret_cast<void *>(a_start);
472 }
473
474 return 0;
475}
476
477unsigned long
478List_alloc::avail()
479{
480 List_alloc_sanity_guard __attribute__((unused)) guard(this, __FUNCTION__);
481 Mem_block *c = _first;
482 unsigned long a = 0;
483 while (c)
484 {
485 a += c->size;
486 c = c->next;
487 }
488
489 return a;
490}
491
492template <typename DBG>
493void
494List_alloc::dump_free_list(DBG &out)
495{
496 Mem_block *c = _first;
497 while (c)
498 {
499 static constexpr char const *const unitstr[4] =
500 { "Byte", "KiB", "MiB", "GiB" };
501
502 unsigned sz = c->size;
503 unsigned i;
504 for (i = 0; i < cxx::array_size(unitstr) && sz > 8 << 10; ++i)
505 sz >>= 10;
506
507 out.printf("%12p - %12p (%u %s)\n",
508 c, reinterpret_cast<char *>(c) + c->size - 1, sz, unitstr[i]);
509
510 c = c->next;
511 }
512}
513
514}
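
The header above only ships the allocator itself; it neither reserves backing memory nor tracks block sizes for the caller. A minimal usage sketch follows, assuming the header is installed as <l4/cxx/list_alloc> and using a made-up static pool as backing store: the allocator is seeded once via free() with initial_free set, and every block obtained from alloc() must later be returned together with its exact size.

// Hypothetical usage sketch, not part of the header above.
#include <l4/cxx/list_alloc>

static char pool[64 << 10];   // made-up 64 KiB backing store

void example()
{
  cxx::List_alloc a;

  // Seed the allocator with the whole pool. With initial_free = true the
  // allocator aligns the region to the Mem_block size itself.
  a.free(pool, sizeof(pool), true);

  // Carve out a 1 KiB block with 256-byte alignment; alloc() returns 0 on failure.
  void *p = a.alloc(1024, 256);
  if (p)
    a.free(p, 1024);          // the caller must remember the size when freeing
}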
unsigned long avail()
Get the amount of available memory.
Definition list_alloc:478
void * alloc_max(unsigned long min, unsigned long *max, unsigned long align, unsigned granularity, unsigned long lower=0, unsigned long upper=~0UL)
Allocate a memory block of min <= size <= max.
Definition list_alloc:270
List_alloc()
Initializes an empty list allocator.
Definition list_alloc:47
void * alloc(unsigned long size, unsigned long align, unsigned long lower=0, unsigned long upper=~0UL)
Allocate a memory block.
Definition list_alloc:390
void free(void *block, unsigned long size, bool initial_free=false)
Return a free memory block to the allocator.
Definition list_alloc:229
l4_addr_t l4_trunc_size(l4_addr_t address, unsigned char bits) L4_NOTHROW
Round an address down to the next lower flexpage with size bits.
Definition consts.h:459
l4_addr_t l4_round_size(l4_addr_t value, unsigned char bits) L4_NOTHROW
Round value up to the next alignment with bits size.
Definition consts.h:484
Common constants.
BasicOStream cerr
Standard error stream.
Our C++ library.
Definition arith:11
Computes the binary logarithm of the given number at compile time.
Definition arith:49
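
alloc_max() hands out the largest chunk it can find between min and *max bytes and reports the size actually granted back through max. A hedged sketch of that call follows; the helper name and the page-sized parameters are illustrative only and not part of the header.

// Hypothetical helper around alloc_max(); names and sizes are made up.
#include <l4/cxx/list_alloc>

void *grab_largest(cxx::List_alloc &a, unsigned long limit, unsigned long *got)
{
  *got = limit;                               // upper bound, adjusted on return
  // Request at least one 4 KiB page, 4 KiB aligned, in 4 KiB granules.
  void *p = a.alloc_max(4096, got, 4096, 4096);
  // On success *got holds the size actually allocated (a multiple of 4096);
  // on failure the call returns 0.
  return p;
}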