3 #include "mem_layout.h"
// No-op allocator stub: alloc() always fails (returns 0), free() does
// nothing, and valid() reports false so walkers skip their allocation
// paths entirely.
// NOTE(review): presumably the body of the Null_alloc used by the const
// walk() overload further below — the enclosing class declaration is not
// visible in this view; confirm against the full file.
10 static void *alloc(unsigned long) { return 0; }
11 static void free(void *) {}
12 static bool valid() { return false; }
// List<_Head, _Tail>: compile-time type list of per-level paging traits
// (class body elided in this view).
15 template< typename _Head, typename _Tail >
// Level<_T, _Level>: terminator case — a single traits type _T describes
// the (only remaining) level, so the runtime `level` argument is ignored.
24 template< typename _T, unsigned _Level >
// Bit position of this level's index field within a virtual address.
30 static unsigned shift(unsigned)
31 { return Traits::Shift; }
// Decode the physical address stored in a raw entry value by wrapping it
// in a throw-away subclass of Traits::Entry and using its addr() decoder.
33 static Address addr(unsigned, Mword entry)
35 struct E : public Traits::Entry
36 { E(Mword raw) { Traits::Entry::_raw = raw; } };
37 return E(entry).addr();
// Width (in bits) of this level's index field.
40 static unsigned size(unsigned)
41 { return Traits::Size; }
// Number of entries at this level (2^Size).
43 static unsigned length(unsigned)
44 { return 1UL << Traits::Size; }
// Extract this level's table index from a virtual address.
46 static Address index(unsigned /*level*/, Address addr)
47 { return (addr >> Traits::Shift) & ((1UL << Traits::Size)-1); }
// Recursive case: a list of traits. Each query peels _Head off for
// level 0 (branch conditions elided in this view) and otherwise recurses
// into the tail with `level - 1`.
51 template< typename _Head, typename _Tail, unsigned _Level >
52 class Level< List<_Head, _Tail>, _Level >
// Traits of the deepest level, taken from the tail of the list.
55 typedef typename Level<_Tail, _Level - 1>::Traits Traits;
57 static unsigned shift(unsigned level)
62 return Level<_Tail, _Level - 1>::shift(level - 1);
// Decode an entry's physical address; same Entry-wrapping trick as in
// the terminator case above.
65 static Address addr(unsigned level, Mword entry)
67 struct E : public Traits::Entry
68 { E(Mword raw) { Traits::Entry::_raw = raw; } };
70 return E(entry).addr();
72 return Level<_Tail, _Level - 1>::addr(level - 1, entry);
75 static unsigned size(unsigned level)
80 return Level<_Tail, _Level - 1>::size(level - 1);
83 static unsigned length(unsigned level)
// Level-0 branch uses _Head::Size directly ...
86 return 1UL << _Head::Size;
88 return Level<_Tail, _Level - 1>::length(level - 1);
91 static Address index(unsigned level, Address addr)
// NOTE(review): ... while this (apparently level-0) branch uses
// Traits (= tail traits) rather than _Head — verify against the
// unelided file that this asymmetry is intended.
94 return (addr >> Traits::Shift) & ((1UL << Traits::Size)-1);
96 return Level<_Tail, _Level - 1>::index(level - 1, addr);
// Recursion terminator for a one-element list view: behaves exactly like
// the single-traits Level for the head type.
101 template< typename _Head, typename _Tail>
102 class Level< List<_Head, _Tail>, 0> : public Level<_Head, 0>
// Entry_vec: the in-memory array of page-table entries for one level,
// sized and indexed according to _Traits.
106 template< typename _Traits >
110 typedef typename _Traits::Entry Entry;
113 Length = 1UL << _Traits::Size,
114 Size = _Traits::Size,
115 Mask = _Traits::Mask,
116 Shift = _Traits::Shift,
// Compute the array index for a virtual address. Two return forms are
// visible; the selecting condition is elided here — presumably a
// compile-time choice on Mask (masked vs. top-level unmasked index).
// TODO(review): confirm against the unelided file.
122 static unsigned idx(Address virt)
125 return (virt >> Shift) & ~(~0UL << Size);
127 return (virt >> Shift);
// Unchecked element access (both const and non-const flavours).
130 Entry &operator [] (unsigned idx) { return _e[idx]; }
131 Entry const &operator [] (unsigned idx) const { return _e[idx]; }
// Invalidate every entry in the table.
133 void clear() { for (unsigned i=0; i < Length; ++i) _e[i].clear(); }
// Walk<_Last, Iter>: walker for the last (leaf) page-table level. All
// operations act directly on this level's Entry_vec; the `level` and
// allocator parameters are accepted but unused since there is nothing
// deeper to descend into.
137 template< typename _Last, typename Iter >
143 typedef typename _Last::Entry Entry;
144 typedef _Last Traits;
147 typedef Walk<_Last, Iter> This;
148 typedef Entry_vec<Level> Vec;
152 void clear() { _e.clear(); }
// Leaf walk: simply return an iterator to the addressed entry.
154 template< typename _Alloc >
155 Iter walk(Address virt, unsigned, _Alloc const &)
156 { return Iter(&_e[Vec::idx(virt)], Level::Shift); }
// Unmap up to `size` bytes starting at `start`, clamped to the end of
// this table; advances start and shrinks size by the amount processed.
158 void unmap(Address &start, unsigned long &size, unsigned)
160 unsigned idx = Vec::idx(start);
161 unsigned cnt = size >> Traits::Shift;
162 if (cnt + idx > Vec::Length)
163 cnt = Vec::Length - idx;
164 unsigned const e = idx + cnt;
166 for (; idx != e; ++idx)
169 start += (unsigned long)cnt << Traits::Shift;
170 size -= (unsigned long)cnt << Traits::Shift;
// Map a physically contiguous range: one entry per page, phys advancing
// by the entry's page size; virt/size are adjusted for the caller.
173 template< typename _Alloc >
174 void map(Address &phys, Address &virt, unsigned long &size,
175              unsigned long attr, unsigned, _Alloc const &)
177 unsigned idx = Vec::idx(virt);
178 unsigned cnt = size >> Traits::Shift;
179 if (cnt + idx > Vec::Length)
180 cnt = Vec::Length - idx;
181 unsigned const e = idx + cnt;
183 for (; idx != e; ++idx, phys += (1UL << Traits::Entry::Page_shift))
184 _e[idx].set(phys, false, true, attr);
185 virt += (unsigned long)cnt << Traits::Shift;
186 size -= (unsigned long)cnt << Traits::Shift;
// Nothing to destroy at the leaf level — no sub-tables were allocated.
189 template< typename _Alloc >
190 void destroy(Address, Address, unsigned, _Alloc const &)
// Copy (synchronize) entries from the remote table _r into this one.
// Returns whether a TLB flush is needed (visible flag: need_flush).
193 template< typename _Alloc >
194 bool sync(Address &l_addr, This const &_r, Address &r_addr,
195            Address &size, unsigned, _Alloc const &)
197 unsigned count = size >> Traits::Shift;
198 unsigned const l = Vec::idx(l_addr);
199 unsigned const r = Vec::idx(r_addr);
200 unsigned const m = l > r ? l : r;
// NOTE(review): `>=` here vs. `>` in unmap()/map() above; with
// m + count == Length the assignment is a no-op, so this looks
// harmless but inconsistent — confirm intent.
201 if (m + count >= Vec::Length)
202 count = Vec::Length - m;
205 Entry const *re = &_r._e[r];
207 bool need_flush = false;
209 for (unsigned n = count; n > 0; --n)
214 // This loop seems unnecessary, but remote_update is also used for
215 // updating the long IPC window.
216 // Now consider the following scenario with super pages:
217 // Sender A makes a long IPC to receiver B.
218 // A sets up the IPC window by reading the pagedir slot from B into a
219 // temporary register. Now the sender is preempted by C. Then C unmaps
220 // the corresponding super page from B. C switches back to A, using
221 // switch_to, which clears the IPC window pde slots from A. BUT then A
222 // writes the content of the temporary register, which contains the now
223 // invalid pde slot, into its own page directory and starts the long IPC.
224 // Because no page fault will happen, A will write to now invalid memory.
225 // So we compare, after storing the pde slot, whether the copy is still
226 // valid. And this solution is much faster than grabbing the cpu lock
227 // when updating the IPC window.
// Volatile re-read of the remote raw entry so the post-copy compare
// below observes a concurrent remote change.
230 typename Traits::Raw const volatile *rr
231 = reinterpret_cast<typename Traits::Raw const *>(re + n - 1);
232 le[n - 1] = *(Entry *)rr;
233 if (EXPECT_TRUE(le[n - 1].raw() == *rr))
238 l_addr += (unsigned long)count << Traits::Shift;
239 r_addr += (unsigned long)count << Traits::Shift;
240 size -= (unsigned long)count << Traits::Shift;
// Walk<List<_Head,_Tail>, Iter>: walker for an intermediate page-table
// level. Owns an Entry_vec for this level; each valid non-leaf entry
// points (as a physical address) at a next-level Walk (`Next`), reached
// via Mem_layout::phys_to_pmem(). `level == 0` requests are re-dispatched
// to the single-level walker `This` via reinterpret_cast (layout of the
// two instantiations is assumed identical — long-standing pattern here).
247 template< typename _Head, typename _Tail, typename Iter >
248 class Walk <List <_Head,_Tail>, Iter >
251 typedef Walk<_Tail, Iter> Next;
252 typedef typename Next::Level Level;
253 typedef typename _Head::Entry Entry;
254 typedef _Head Traits;
256 enum { Depth = Next::Depth + 1 };
259 typedef Walk<_Head, Iter> This;
260 typedef Walk< List< _Head, _Tail >, Iter> This2;
261 typedef Entry_vec<_Head> Vec;
// Allocate a fresh next-level table, link it into entry `e` (storing its
// physical address), and return it. Returns on allocation failure are
// elided in this view.
264 template< typename _Alloc >
265 Next *alloc_next(Entry *e, _Alloc const &a)
267 Next *n = (Next*)a.alloc(sizeof(Next));
268 if (EXPECT_FALSE(!n))
272 e->set(Mem_layout::pmem_to_phys(n), true, true);
278 void clear() { _e.clear(); }
// Descend towards the entry for `virt`. Allocates missing intermediate
// tables when the allocator is valid; stops early at this level when the
// target level is reached, the entry is an unfillable hole, or the entry
// is a leaf (super page).
280 template< typename _Alloc >
281 Iter walk(Address virt, unsigned level, _Alloc const &alloc)
283 Entry *e = &_e[Vec::idx(virt)];
285 return Iter(e, _Head::Shift);
286 else if (!e->valid())
289 if (alloc.valid() && (n = alloc_next(e, alloc)))
290 return n->walk(virt, level - 1, alloc);
292 return Iter(e, _Head::Shift);
296 if (_Head::May_be_leaf && e->leaf())
297 return Iter(e, _Head::Shift);
300 Next *n = (Next*)Mem_layout::phys_to_pmem(e->addr());
301 return n->walk(virt, level - 1, alloc);
// Unmap: at level 0 handle entries of this size directly (via This);
// otherwise skip holes/leaves and recurse into the next level.
305 void unmap(Address &start, unsigned long &size, unsigned level)
309 reinterpret_cast<This*>(this)->unmap(start, size, 0);
315 Entry *e = &_e[Vec::idx(start)];
317 if (!e->valid() || e->leaf())
320 Next *n = (Next*)Mem_layout::phys_to_pmem(e->addr());
321 n->unmap(start, size, level - 1);
// Map: at level 0 create entries of this size directly; otherwise ensure
// a next-level table exists (allocating if permitted) and recurse.
325 template< typename _Alloc >
326 void map(Address &phys, Address &virt, unsigned long &size,
327 unsigned long attr, unsigned level, _Alloc const &alloc)
331 reinterpret_cast<This*>(this)->map(phys, virt, size, attr, 0, alloc);
337 Entry *e = &_e[Vec::idx(virt)];
341 if (alloc.valid() && (n = alloc_next(e, alloc)))
342 n->map(phys, virt, size, attr, level - 1, alloc);
347 if (_Head::May_be_leaf && e->leaf())
350 n = (Next*)Mem_layout::phys_to_pmem(e->addr());
351 n->map(phys, virt, size, attr, level - 1, alloc);
// Recursively tear down sub-tables in [start, end) and return their
// memory to the allocator. Interior sub-tables get the full-range
// arguments (0 .. next-level span); boundary ones get the exact bounds.
355 template< typename _Alloc >
356 void destroy(Address start, Address end, unsigned level, _Alloc const &alloc)
361 unsigned idx_start = Vec::idx(start);
362 unsigned idx_end = Vec::idx(end + (1UL << Traits::Shift) - 1);
363 unsigned idx = idx_start;
365 for (; idx < idx_end; ++idx)
368 if (!e->valid() || (_Head::May_be_leaf && e->leaf()))
371 Next *n = (Next*)Mem_layout::phys_to_pmem(e->addr());
373 n->destroy(idx > idx_start ? 0 : start,
374 idx + 1 < idx_end ? 1UL << Next::Traits::Shift : end,
377 alloc.free(n, sizeof(Next));
// Synchronize this sub-tree with the remote one rooted at _r: allocate
// missing local tables where possible, skip unsyncable slots (advancing
// the addresses by one entry span), and recurse per entry pair.
381 template< typename _Alloc >
382 bool sync(Address &l_a, This2 const &_r, Address &r_a,
383 Address &size, unsigned level, _Alloc const &alloc)
386 return reinterpret_cast<This*>(this)
387 ->sync(l_a, reinterpret_cast<This const &>(_r), r_a, size, 0, alloc);
389 unsigned count = size >> Traits::Shift;
391 unsigned const lx = Vec::idx(l_a);
392 unsigned const rx = Vec::idx(r_a);
393 unsigned const mx = lx > rx ? lx : rx;
394 if (mx + count >= Vec::Length)
395 count = Vec::Length - mx;
398 bool need_flush = false;
400 for (unsigned i = count; size && i > 0; --i) //while (size)
402 Entry *l = &_e[Vec::idx(l_a)];
403 Entry const *r = &_r._e[Vec::idx(r_a)];
// Skip this slot: remote hole, or local hole we cannot fill (no valid
// allocator or allocation failed). Leading condition is elided here.
406 || (!l->valid() && (!alloc.valid()
407 || !(n = alloc_next(l,alloc)))))
409 l_a += 1UL << Traits::Shift;
410 r_a += 1UL << Traits::Shift;
411 if (size > 1UL << Traits::Shift)
413 size -= 1UL << Traits::Shift;
420 n = (Next*)Mem_layout::phys_to_pmem(l->addr());
422 Next *rn = (Next*)Mem_layout::phys_to_pmem(r->addr());
424 if (n->sync(l_a, *rn, r_a, size, level - 1, alloc))
// Iter: cursor pointing at a single page-table entry plus the effective
// virtual-address shift of the level it was found at (level shift plus
// the table-wide _Va_shift bias).
433 template< typename _E, unsigned _Va_shift >
437 Iter(_E *e, unsigned char shift) : e(e), s(shift + _Va_shift) {}
// Converting copy: allows e.g. non-const -> const entry iterators.
439 template< typename _I2 >
440 Iter(_I2 const &o) : e(o.e), s(o.s) {}
442 unsigned char shift() const { return s; }
// Entry's physical address, truncated to the granularity of this level.
443 unsigned long addr() const { return e->addr() & (~0UL << s); }
// Fragment of a per-level traits class (declaration elided in this
// view): exports the entry type, its raw representation, and whether
// entries at this level may be leaves (super pages).
460 typedef _Entry Entry;
461 typedef typename Entry::Raw Raw;
465 May_be_leaf = _May_be_leaf,
// Shift<T, _Shift>: metafunction that rebuilds a traits type (or, in the
// partial specialization below, a whole traits List) with every level's
// shift adjusted by _Shift. Keeps the original list available as
// Orig_list.
472 template< typename _T, unsigned _Shift >
476 typedef _T Orig_list;
478 < typename _T::Entry,
// List case: apply Shift recursively to head and tail and re-assemble.
486 template< typename _Head, typename _Tail, unsigned _Shift >
487 class Shift< List<_Head, _Tail>, _Shift >
490 typedef Ptab::List<_Head, _Tail> Orig_list;
493 typename Shift<_Head, _Shift>::List,
494 typename Shift<_Tail, _Shift>::List
// Plain address policy: virtual addresses are passed as raw Address
// values; val() is the identity.
502 typedef Address Value_type;
503 static Address val(Address a) { return a; }
// Wrapped-address policy: N is a strong address type exposing value();
// SHIFT additionally biases all levels (see Iter's _Va_shift).
506 template< typename N, int SHIFT >
510 enum { Shift = SHIFT };
511 typedef N Value_type;
512 static Address val(N a) { return a.value(); }
// Base: the public page-table facade. Unwraps the address policy _Addr
// and forwards every operation to the internal Walk hierarchy (_base).
517 typename _Base_entry,
519 typename _Addr = Address_wrap
524 typedef typename _Addr::Value_type Va;
525 typedef _Traits Traits;
526 typedef _Base_entry Entry;
527 typedef Ptab::Iter<Entry, _Addr::Shift> Iter;
528 typedef typename _Traits::Head L0;
531 typedef Base<_Base_entry, _Traits> This;
532 typedef Ptab::Walk<_Traits, Iter> Walk;
535 enum { Depth = Walk::Depth };
// Mutating walk: may allocate intermediate tables via `alloc`.
537 template< typename _Alloc >
538 Iter walk(Va virt, unsigned level, _Alloc const &alloc)
540 return _base.walk(_Addr::val(virt), level, alloc);
// Read-only walk: const_cast is safe because Null_alloc never allocates,
// so _base is not modified. level = 100 means "as deep as possible".
543 Iter walk(Va virt, unsigned level = 100) const
545 return const_cast<Walk&>(_base).walk(_Addr::val(virt), level, Null_alloc());
// Copy entries from the remote table *_r into this one.
// NOTE(review): the parameter names Base with three template arguments
// while alloc_cast() below uses four — the elided template parameter
// list (around original lines 516-523) decides whether this binds _Addr
// to the intended slot; verify against the unelided file.
549 template< typename _Alloc >
550 bool sync(Va l_addr, Base< _Base_entry, _Traits, _Addr> const *_r,
551           Va r_addr, Va size, unsigned level = 100,
552 _Alloc const &alloc = _Alloc())
554 Address la = _Addr::val(l_addr);
555 Address ra = _Addr::val(r_addr);
556 Address sz = _Addr::val(size);
557 return _base.sync(la, _r->_base,
558 ra, sz, level, alloc);
561 void clear() { _base.clear(); }
// Remove translations for [virt, virt+size) down to `level`.
563 void unmap(Va virt, Va size, unsigned level)
565 Address va = _Addr::val(virt);
566 unsigned long sz = _Addr::val(size);
567 _base.unmap(va, sz, level);
// Establish translations phys -> virt for `size` bytes at `level`.
570 template< typename _Alloc >
571 void map(Address phys, Va virt, Va size, unsigned long attr,
572 unsigned level, _Alloc const &alloc = _Alloc())
574 Address va = _Addr::val(virt);
575 unsigned long sz = _Addr::val(size);
576 _base.map(phys, va, sz, attr, level, alloc);
// Free all sub-tables covering [start, end] back to `alloc`.
579 template< typename _Alloc >
580 void destroy(Va start, Va end, unsigned level, _Alloc const &alloc = _Alloc())
581 { _base.destroy(_Addr::val(start), _Addr::val(end), level, alloc); }
// Reinterpret this table as using a different allocator policy; relies
// on the allocator parameter not affecting object layout.
584 template< typename _New_alloc >
585 Base<_Base_entry, _Traits, _New_alloc, _Addr> *alloc_cast()
586 { return reinterpret_cast<Base<_Base_entry, _Traits, _New_alloc, _Addr> *>(this); }
588 template< typename _New_alloc >
589 Base<_Base_entry, _Traits, _New_alloc, _Addr> const *alloc_cast() const
590 { return reinterpret_cast<Base<_Base_entry, _Traits, _New_alloc, _Addr> const *>(this); }