 * (c) 2008-2009 Alexander Warg <warg@os.inf.tu-dresden.de>,
 *               Torsten Frenzel <frenzel@os.inf.tu-dresden.de>
 *     economic rights: Technische Universität Dresden (Germany)
 *
 * This file is part of TUD:OS and distributed under the terms of the
 * GNU General Public License 2.
 * Please see the COPYING-GPL-2 file for details.
 *
 * As a special exception, you may use this file as part of a free software
 * library without restriction.  Specifically, if other files instantiate
 * templates or use macros or inline functions from this file, or you compile
 * this file and link it with other files to produce an executable, this
 * file does not by itself cause the resulting executable to be covered by
 * the GNU General Public License.  This exception does not however
 * invalidate any other reasons why the executable file might be covered by
 * the GNU General Public License.
23 #include <l4/cxx/arith>
24 #include <l4/cxx/minmax>
/**
 * \brief Standard list-based allocator.
 *
 * Maintains a singly linked free list, sorted by ascending block
 * address. Every free block begins with an embedded Mem_block header,
 * so the allocator needs no memory of its own.
 */
class List_alloc
{
private:
  friend class List_alloc_sanity_guard;

  /// Header embedded at the start of every free block.
  struct Mem_block
  {
    Mem_block *next;     ///< Next free block (ascending address order).
    unsigned long size;  ///< Size of this free block in bytes.
  };

  Mem_block *_first;     ///< Head of the address-sorted free list.

  // Debug hooks; compiled to no-ops unless CXX_LIST_ALLOC_SANITY is defined.
  inline void check_overlap(void *, unsigned long );
  inline void sanity_check_list(char const *, char const *);

  /// Coalesce physically adjacent free blocks in the list.
  inline void merge();

public:

  /**
   * \brief Initializes an empty list allocator.
   *
   * \note To initialize the allocator with available memory
   *       use the #free() function.
   */
  List_alloc() : _first(0) {}

  /**
   * \brief Return a free memory block to the allocator.
   *
   * \param block         pointer to memory block
   * \param size          size of memory block
   * \param initial_free  Set to true for putting fresh memory
   *                      to the allocator. This will enforce alignment
   *                      on that memory.
   */
  inline void free(void *block, unsigned long size, bool initial_free = false);

  /**
   * \brief Alloc a memory block.
   *
   * \param size   Size of the memory block
   * \param align  Alignment constraint
   *
   * \return Pointer to memory block, or 0 if no fitting block exists
   */
  inline void *alloc(unsigned long size, unsigned align);

  /**
   * \brief Allocate a memory block of `min` <= size <= `max`.
   *
   * \param         min          Minimal size to allocate.
   * \param[in,out] max          Maximum size to allocate. The actual allocated
   *                             size is returned here.
   * \param         align        Alignment constraint.
   * \param         granularity  Granularity to use for the allocation.
   *
   * \return Pointer to memory block, or 0 if no fitting block exists
   */
  inline void *alloc_max(unsigned long min, unsigned long *max,
                         unsigned align, unsigned granularity);

  /**
   * \brief Get the amount of available memory.
   *
   * \return Available memory in bytes
   */
  inline unsigned long avail();

  /// Dump every free block via \a out.printf().
  template <typename DBG>
  void dump_free_list(DBG &out);
};
104 #if !defined (CXX_LIST_ALLOC_SANITY)
105 class List_alloc_sanity_guard
108 List_alloc_sanity_guard(List_alloc *, char const *)
115 List_alloc::check_overlap(void *, unsigned long )
119 List_alloc::sanity_check_list(char const *, char const *)
124 class List_alloc_sanity_guard
131 List_alloc_sanity_guard(List_alloc *a, char const *func)
133 { a->sanity_check_list(func, "entry"); }
135 ~List_alloc_sanity_guard()
136 { a->sanity_check_list(func, "exit"); }
140 List_alloc::check_overlap(void *b, unsigned long s)
142 unsigned long const mb_align = (1UL << arith::Ld<sizeof(Mem_block)>::value) - 1;
143 if ((unsigned long)b & mb_align)
145 L4::cerr << "List_alloc(FATAL): trying to free unaligned memory: "
146 << b << " align=" << arith::Ld<sizeof(Mem_block)>::value << "\n";
149 Mem_block *c = _first;
150 for (;c ; c = c->next)
152 unsigned long x_s = (unsigned long)b;
153 unsigned long x_e = x_s + s;
154 unsigned long b_s = (unsigned long)c;
155 unsigned long b_e = b_s + c->size;
157 if ( (x_s >= b_s && x_s < b_e)
158 || (x_e > b_s && x_e <= b_e)
159 || (b_s >= x_s && b_s < x_e)
160 || (b_e > x_s && b_e <= x_e))
162 L4::cerr << "List_alloc(FATAL): trying to free memory that "
163 "is already free: \n ["
164 << (void*)x_s << '-' << (void*)x_e << ") overlaps ["
165 << (void*)b_s << '-' << (void*)b_e << ")\n";
171 List_alloc::sanity_check_list(char const *func, char const *info)
173 Mem_block *c = _first;
174 for (;c ; c = c->next)
180 L4::cerr << "List_alloc(FATAL): " << func << '(' << info
181 << "): list oerder violation\n";
184 if (((unsigned long)c) + c->size > (unsigned long)c->next)
186 L4::cerr << "List_alloc(FATAL): " << func << '(' << info
187 << "): list oerder violation\n";
198 List_alloc_sanity_guard __attribute__((unused)) guard(this, __func__);
199 Mem_block *c = _first;
202 unsigned long f_start = (unsigned long)c;
203 unsigned long f_end = f_start + c->size;
204 unsigned long n_start = (unsigned long)c->next;
206 if (f_end == n_start)
208 c->size += c->next->size;
209 c->next = c->next->next;
218 List_alloc::free(void *block, unsigned long size, bool initial_free)
220 List_alloc_sanity_guard __attribute__((unused)) guard(this, __func__);
222 unsigned long const mb_align = (1UL << arith::Ld<sizeof(Mem_block)>::value) - 1;
226 // enforce alignment constraint on initial memory
227 unsigned long nblock = ((unsigned long)block + mb_align) & ~mb_align;
228 size = (size - (nblock - (unsigned long)block)) & ~mb_align;
229 block = (void*)nblock;
232 // blow up size to the minimum aligned size
233 size = (size + mb_align) & ~mb_align;
235 check_overlap(block, size);
237 Mem_block **c = &_first;
242 while (*c && *c < block)
248 *c = (Mem_block*)block;
257 List_alloc::alloc_max(unsigned long min, unsigned long *max, unsigned align,
258 unsigned granularity)
260 List_alloc_sanity_guard __attribute__((unused)) guard(this, __func__);
262 unsigned char const mb_bits = arith::Ld<sizeof(Mem_block)>::value;
263 unsigned long const mb_align = (1UL << mb_bits) - 1;
265 // blow minimum up to at least the minimum aligned size of a Mem_block
266 min = l4_round_size(min, mb_bits);
267 // truncate maximum to at least the size of a Mem_block
268 *max = l4_trunc_size(*max, mb_bits);
269 // truncate maximum size according to granularity
270 *max = *max & ~(granularity - 1UL);
275 unsigned long almask = align ? (align - 1) : 0;
277 // minimum alignment is given by the size of a Mem_block
278 if (almask < mb_align)
281 Mem_block **c = &_first;
283 unsigned long max_fit = 0;
285 for (; *c; c = &(*c)->next)
287 // address of free memory block
288 unsigned long n_start = (unsigned long)(*c);
290 // block too small, next
291 // XXX: maybe we can skip this and just do the test below
292 if ((*c)->size < min)
295 // aligned start address within the free block
296 unsigned long a_start = (n_start + almask) & ~almask;
298 // check if aligned start address is behind the block, next
299 if (a_start - n_start >= (*c)->size)
302 // remaining size after subtracting the padding for the alignment
303 unsigned long r_size = (*c)->size - a_start + n_start;
304 // round down according to granularity
305 r_size &= ~(granularity - 1UL);
318 if (r_size > max_fit)
327 unsigned long n_start = (unsigned long)(*fit);
328 unsigned long a_start = (n_start + almask) & ~almask;
329 unsigned long r_size = (*fit)->size - a_start + n_start;
331 if (a_start > n_start)
333 (*fit)->size -= r_size;
340 if (r_size == max_fit)
341 return (void *)a_start;
343 Mem_block *m = (Mem_block*)(a_start + max_fit);
345 m->size = r_size - max_fit;
347 return (void*)a_start;
354 List_alloc::alloc(unsigned long size, unsigned align)
356 List_alloc_sanity_guard __attribute__((unused)) guard(this, __func__);
358 unsigned long const mb_align = (1UL << arith::Ld<sizeof(Mem_block)>::value) - 1;
360 // blow up size to the minimum aligned size
361 size = (size + mb_align) & ~mb_align;
363 unsigned long almask = align ? (align -1) : 0;
365 // minimum alignment is given by the size of a Mem_block
366 if (almask < mb_align)
369 Mem_block **c = &_first;
371 for (; *c; c=&(*c)->next)
373 // address of free memory block
374 unsigned long n_start = (unsigned long)(*c);
376 // block too small, next
377 // XXX: maybe we can skip this and just do the test below
378 if ((*c)->size < size)
381 // aligned start address within the free block
382 unsigned long a_start = (n_start + almask) & ~almask;
384 // hm, block too small after alignment, next
385 if (a_start - n_start >= (*c)->size)
388 // remaining size after subtracting the padding
390 unsigned long r_size = (*c)->size - a_start + n_start;
396 if (a_start > n_start)
398 // have free space before the allocated block
399 // shrink the block and set c to the next pointer of that
401 (*c)->size -= r_size;
405 // drop the block, c remains the next pointer of the
409 // allocated the whole remaining space
411 return (void*)a_start;
413 // add a new free block behind the allocated block
414 Mem_block *m = (Mem_block*)(a_start + size);
416 m->size = r_size - size;
418 return (void*)a_start;
427 List_alloc_sanity_guard __attribute__((unused)) guard(this, __FUNCTION__);
428 Mem_block *c = _first;
439 template <typename DBG>
441 List_alloc::dump_free_list(DBG &out)
443 Mem_block *c = _first;
454 else if (c->size < 1 << 20)
465 out.printf("%10p - %10p (%u %s)\n", c, (char *) c + c->size - 1, sz, unit);