INTERFACE:

#include <auto_quota.h>
#include "lock_guard.h"
#include "spin_lock.h"

class Buddy_alloc;
class Mem_region_map_base;

template<typename Q> class Kmem_q_alloc;

class Kmem_alloc
{
public:
  typedef Buddy_alloc Alloc;

private:
  typedef Spin_lock<> Lock;

  static Lock lock;
  static Alloc *a;
  static unsigned long _orig_free;
  static Kmem_alloc *_alloc;
};
class Kmem_alloc_reaper : public cxx::S_list_item
{
  size_t (*_reap)(bool desperate);

  typedef cxx::S_list_bss<Kmem_alloc_reaper> Reaper_list;
  static Reaper_list mem_reapers;
};
template<typename Q>
class Kmem_q_alloc
{
public:
  Kmem_q_alloc(Q *q, Kmem_alloc *a) : _a(a), _q(q) {}

  bool valid() const { return _a && _q; }

  void *alloc(unsigned long size) const
  {
    // Charge the quota first; Auto_quota rolls the charge back
    // automatically unless release() is called.
    Auto_quota<Q> q(_q, size);
    if (EXPECT_FALSE(!q))
      return 0;

    void *b;
    if (EXPECT_FALSE(!(b = _a->unaligned_alloc(size))))
      return 0;

    q.release();
    return b;
  }

  void free(void *block, unsigned long size) const
  {
    _a->unaligned_free(size, block);
    _q->free(size);
  }

  template<typename V>
  Phys_mem_addr::Value to_phys(V v) const
  { return _a->to_phys(v); }

private:
  Kmem_alloc *_a;
  Q *_q;
};
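/*
 * Usage sketch (illustrative, not part of the original file): a client
 * allocates through the quota-charging facade and frees through the same
 * object, so quota and kernel memory stay balanced. `Ram_quota` stands in
 * for any quota type providing alloc()/free().
 *
 *   Kmem_q_alloc<Ram_quota> qa = Kmem_alloc::q_allocator(ram_quota);
 *   if (qa.valid())
 *     {
 *       if (void *buf = qa.alloc(256))  // charges 256 bytes to the quota
 *         {
 *           // ... use buf ...
 *           qa.free(buf, 256);          // returns memory, uncharges quota
 *         }
 *     }
 */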
IMPLEMENTATION:

#include <cassert>

#include "config.h"
#include "kip.h"
#include "mem_layout.h"
#include "mem_region.h"
#include "buddy_alloc.h"
#include "panic.h"

static Kmem_alloc::Alloc _a;
Kmem_alloc::Alloc *Kmem_alloc::a = &_a;
unsigned long Kmem_alloc::_orig_free;
Kmem_alloc::Lock Kmem_alloc::lock;
Kmem_alloc *Kmem_alloc::_alloc;
PUBLIC static inline NEEDS[<cassert>]
Kmem_alloc *
Kmem_alloc::allocator()
{
  assert (_alloc /* uninitialized use of Kmem_alloc */);
  return _alloc;
}
PUBLIC template<typename Q> static inline NEEDS[<cassert>]
Kmem_q_alloc<Q>
Kmem_alloc::q_allocator(Q *quota)
{
  assert (_alloc /* uninitialized use of Kmem_alloc */);
  return Kmem_q_alloc<Q>(quota, _alloc);
}
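/*
 * Illustrative note: the two accessors serve different clients. Raw
 * kernel-internal allocations use allocator() directly; code that must
 * account memory to a task wraps the same allocator with a quota via
 * q_allocator(). `ram_quota` below is a hypothetical quota pointer.
 *
 *   void *raw = Kmem_alloc::allocator()->alloc(Config::PAGE_SHIFT);
 *   auto qa = Kmem_alloc::q_allocator(ram_quota);
 */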
PROTECTED static
void
Kmem_alloc::allocator(Kmem_alloc *a)
{
  _alloc = a;
}
PUBLIC static FIASCO_INIT
void
Kmem_alloc::init()
{
  static Kmem_alloc al;
  Kmem_alloc::allocator(&al);
}
PUBLIC
void
Kmem_alloc::dump() const
{ a->dump(); }
PUBLIC inline NEEDS[Kmem_alloc::unaligned_alloc]
void *
Kmem_alloc::alloc(size_t o)
{
  // Allocate a block of 2^o bytes.
  return unaligned_alloc(1UL << o);
}
PUBLIC inline NEEDS[Kmem_alloc::unaligned_free]
void
Kmem_alloc::free(size_t o, void *p)
{
  // Free a block of 2^o bytes previously returned by alloc(o).
  unaligned_free(1UL << o, p);
}
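/*
 * Worked example (illustrative): the order-based interface deals in
 * power-of-two sizes, so o = Config::PAGE_SHIFT requests 1UL << 12
 * = 4096 bytes on 4 KiB-page configurations:
 *
 *   void *pg = Kmem_alloc::allocator()->alloc(Config::PAGE_SHIFT);
 *   if (pg)
 *     Kmem_alloc::allocator()->free(Config::PAGE_SHIFT, pg);
 */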
PUBLIC
void *
Kmem_alloc::unaligned_alloc(unsigned long size)
{
  assert(size >= 8 /* NEW INTERFACE PARANOIA */);

  void *ret;

  {
    auto guard = lock_guard(lock);
    ret = a->alloc(size);
  }

  if (!ret)
    {
      // Out of memory: drop the allocator lock first, because a reaper may
      // free memory through unaligned_free(), which takes the same
      // non-recursive lock. Then retry the allocation once.
      Kmem_alloc_reaper::morecore(/* desperate= */ true);

      auto guard = lock_guard(lock);
      ret = a->alloc(size);
    }

  return ret;
}
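/*
 * Usage sketch (illustrative): even the desperate reap pass may fail, so
 * callers must be prepared for a null result.
 *
 *   void *buf = Kmem_alloc::allocator()->unaligned_alloc(3 * Config::PAGE_SIZE);
 *   if (EXPECT_FALSE(!buf))
 *     return 0;  // propagate the out-of-memory condition
 */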
PUBLIC
void
Kmem_alloc::unaligned_free(unsigned long size, void *page)
{
  assert(size >= 8 /* NEW INTERFACE PARANOIA */);

  auto guard = lock_guard(lock);
  a->free(size, page);
}
PRIVATE static FIASCO_INIT
unsigned long
Kmem_alloc::create_free_map(Kip const *kip, Mem_region_map_base *map)
{
  unsigned long available_size = 0;
  Mem_desc const *md = kip->mem_descs();
  Mem_desc const *const md_end = md + kip->num_mem_descs();

  for (; md < md_end; ++md)
    {
      if (!md->valid())
        {
          const_cast<Mem_desc*>(md)->type(Mem_desc::Undefined);
          continue;
        }

      // Skip descriptors for virtual regions
      if (md->is_virtual())
        continue;

      unsigned long s = md->start();
      unsigned long e = md->end();

      // Sweep out stupid descriptors (that have the end before the start)
      if (e < s)
        {
          const_cast<Mem_desc*>(md)->type(Mem_desc::Undefined);
          continue;
        }

      switch (md->type())
        {
        case Mem_desc::Conventional:
          // Shrink to page granularity: round the start up, the end down
          s = (s + Config::PAGE_SIZE - 1) & ~(Config::PAGE_SIZE - 1);
          e = ((e + 1) & ~(Config::PAGE_SIZE - 1)) - 1;
          if (e <= s)
            break;
          available_size += e - s + 1;
          if (!map->add(Mem_region(s, e)))
            panic("Kmem_alloc::create_free_map(): memory map too small");
          break;
        case Mem_desc::Reserved:
        case Mem_desc::Dedicated:
        case Mem_desc::Shared:
        case Mem_desc::Arch:
        case Mem_desc::Bootloader:
          // Grow to page granularity: round the start down, the end up,
          // then subtract the region from the free map
          s = s & ~(Config::PAGE_SIZE - 1);
          e = ((e + Config::PAGE_SIZE) & ~(Config::PAGE_SIZE - 1)) - 1;
          if (!map->sub(Mem_region(s, e)))
            panic("Kmem_alloc::create_free_map(): memory map too small");
          break;
        default:
          break;
        }
    }

  return available_size;
}
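/*
 * Worked example (illustrative, 4 KiB pages): a Conventional descriptor
 * covering [0x1234, 0x5abc] is shrunk inward to whole pages,
 *
 *   s = (0x1234 + 0xfff) & ~0xfff    = 0x2000
 *   e = ((0x5abc + 1) & ~0xfff) - 1  = 0x4fff
 *
 * so no partially usable page is added to the free map. The same range as
 * a Reserved descriptor is grown outward to [0x1000, 0x5fff] before being
 * subtracted, so no reserved byte can ever be handed out.
 */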
PUBLIC template< typename Q >
inline
void *
Kmem_alloc::q_alloc(Q *quota, size_t order)
{
  Auto_quota<Q> q(quota, 1UL << order);
  if (EXPECT_FALSE(!q))
    return 0;

  void *b = alloc(order);
  if (EXPECT_FALSE(!b))
    return 0;

  q.release();
  return b;
}
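/*
 * Usage sketch (illustrative): allocate one page charged to a task's
 * quota. On failure neither memory nor quota is consumed, because the
 * Auto_quota charge is rolled back whenever release() is not reached.
 *
 *   void *p = Kmem_alloc::allocator()->q_alloc(quota, Config::PAGE_SHIFT);
 *   if (p)
 *     Kmem_alloc::allocator()->q_free(quota, Config::PAGE_SHIFT, p);
 */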
PUBLIC template< typename Q >
inline
void *
Kmem_alloc::q_unaligned_alloc(Q *quota, size_t size)
{
  Auto_quota<Q> q(quota, size);
  if (EXPECT_FALSE(!q))
    return 0;

  void *b;
  if (EXPECT_FALSE(!(b = unaligned_alloc(size))))
    return 0;

  q.release();
  return b;
}
PUBLIC inline NEEDS["mem_layout.h"]
void
Kmem_alloc::free_phys(size_t s, Address p)
{
  void *va = (void *)Mem_layout::phys_to_pmem(p);
  // ~0UL is the sentinel for physical addresses outside the kernel's
  // pmem mapping; such memory cannot be freed through a virtual address.
  if ((unsigned long)va != ~0UL)
    free(s, va);
}
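/*
 * Usage sketch (illustrative): free an order-PAGE_SHIFT block by its
 * physical address; `page_phys` is a hypothetical physical address
 * obtained from an earlier allocation.
 *
 *   Kmem_alloc::allocator()->free_phys(Config::PAGE_SHIFT, page_phys);
 */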
PUBLIC template< typename Q >
inline
void
Kmem_alloc::q_free_phys(Q *quota, size_t order, Address obj)
{
  free_phys(order, obj);
  quota->free(1UL << order);
}
PUBLIC template< typename Q >
inline
void
Kmem_alloc::q_free(Q *quota, size_t order, void *obj)
{
  free(order, obj);
  quota->free(1UL << order);
}
PUBLIC template< typename Q >
inline
void
Kmem_alloc::q_unaligned_free(Q *quota, size_t size, void *obj)
{
  unaligned_free(size, obj);
  quota->free(size);
}
Kmem_alloc_reaper::Reaper_list Kmem_alloc_reaper::mem_reapers;

PUBLIC inline NEEDS["atomic.h"]
Kmem_alloc_reaper::Kmem_alloc_reaper(size_t (*reap)(bool desperate))
: _reap(reap)
{
  // Register lock-free on the global reaper list using an MP-safe CAS.
  mem_reapers.add(this, mp_cas<cxx::S_list_item*>);
}
PUBLIC static
size_t
Kmem_alloc_reaper::morecore(bool desperate = false)
{
  size_t freed = 0;

  for (Reaper_list::Const_iterator reaper = mem_reapers.begin();
       reaper != mem_reapers.end(); ++reaper)
    freed += reaper->_reap(desperate);

  return freed;
}
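/*
 * Usage sketch (illustrative): a subsystem with reclaimable memory
 * registers a reaper once, typically as a static object; morecore() then
 * calls back into it whenever unaligned_alloc() runs dry. All names below
 * are hypothetical.
 *
 *   static size_t reap_my_cache(bool desperate)
 *   {
 *     // Release cached objects; give up everything when `desperate`.
 *     return my_cache.shrink(desperate ? ~0UL : some_limit);
 *   }
 *
 *   static Kmem_alloc_reaper my_cache_reaper(reap_my_cache);
 */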