3 #include "mem_layout.h"
// Stub allocator used where a page-table walk must not allocate:
// alloc() always fails and valid() reports the allocator as unusable,
// so callers take their "no allocator" paths.
static void *alloc(unsigned long) { return 0; }   // never yields memory
static void free(void *) {}                       // nothing to release
static bool valid() { return false; }             // marks this allocator as a no-op
static unsigned to_phys(void *) { return 0; }     // no virt->phys translation available
// List<_Head, _Tail>: compile-time cons cell describing one page-table
// level (_Head = traits of this level, _Tail = the levels below).
// NOTE(review): the definition bodies are not visible in this excerpt.
template< typename _Head, typename _Tail >
// Tupel<...>: metafunction folding a flat parameter pack of level traits
// into a right-nested List.
template< typename ...T >
// Single-element case (body not visible here).
template< typename T >
30 template< typename H, typename T >
31 struct Tupel<H, T> { typedef Ptab::List<H, T> List; };
33 template<typename T1, typename T2, typename T3, typename ...X>
34 struct Tupel<T1, T2, T3, X...>
35 { typedef Ptab::List<T1, typename Tupel<T2, T3, X...>::List > List; };
// Level<_T, _Level>: terminal case of the level descriptor -- only a
// single page-table level, described by the traits type _T, remains.
// All queries ignore the runtime `level` argument for that reason.
// NOTE(review): the struct head and its typedefs are not visible in
// this excerpt; `Traits` presumably names _T.
template< typename _T, unsigned _Level >
static unsigned shift(unsigned)
{ return Traits::Shift; }                 // VA shift of this level

static unsigned size(unsigned)
{ return Traits::Size; }                  // log2 of the number of entries

static unsigned length(unsigned)
{ return 1UL << Traits::Size; }           // number of entries in one table

// Index of `addr` within a table of this level.
static Address index(unsigned /*level*/, Address addr)
{ return (addr >> Traits::Shift) & ((1UL << Traits::Size)-1); }

static unsigned entry_size(unsigned)
{ return sizeof(typename Traits::Entry); }
// Level<List<_Head,_Tail>, _Level>: recursive case.  Each query either
// answers for this level (the `level == 0` branch lines are not visible
// in this excerpt) or delegates to the next deeper level with level - 1.
template< typename _Head, typename _Tail, unsigned _Level >
struct Level< List<_Head, _Tail>, _Level >
typedef Level<_Tail, _Level - 1> Next_level;

static unsigned shift(unsigned level)
// (this-level branch not visible in this excerpt)
return Next_level::shift(level - 1);

static unsigned size(unsigned level)
return Next_level::size(level - 1);

static unsigned length(unsigned level)
// this-level answer: number of entries in one table
return 1UL << Traits::Size;
return Next_level::length(level - 1);

static Address index(unsigned level, Address addr)
// this-level answer: slot selected by addr
return (addr >> Traits::Shift) & ((1UL << Traits::Size)-1);
return Next_level::index(level - 1, addr);

static unsigned entry_size(unsigned level)
return sizeof(typename Traits::Entry);
return Next_level::entry_size(level - 1);

// Recursion anchor: at level 0 fall back to the head level's descriptor.
template< typename _Head, typename _Tail>
struct Level< List<_Head, _Tail>, 0> : Level<_Head, 0>
// Entry_vec<_Traits>: fixed-size array of page-table entries making up
// one table of a single level.
template< typename _Traits >
typedef typename _Traits::Entry Entry;        // raw entry type of this level
Length = 1UL << _Traits::Size,                // number of slots in the table
Size = _Traits::Size,                         // log2(Length)
Mask = _Traits::Mask,                         // NOTE(review): semantics come from _Traits -- confirm
Shift = _Traits::Shift,                       // VA shift selecting the slot

// Slot index for a virtual address.  The branch structure choosing
// between the two returns is not visible in this excerpt.
static unsigned idx(Address virt)
return cxx::get_lsb(virt >> Shift, (Address)Size);
return (virt >> Shift);

// Unchecked slot access.
Entry &operator [] (unsigned idx) { return _e[idx]; }
Entry const &operator [] (unsigned idx) const { return _e[idx]; }
138 template<typename PTE_PTR>
139 void clear(unsigned level, bool force_write_back)
141 for (unsigned i=0; i < Length; ++i)
142 PTE_PTR(&_e[i], level).clear();
144 if (force_write_back)
145 PTE_PTR::write_back(&_e[0], &_e[Length]);
// Walk<_Last, PTE_PTR, DEPTH>: terminal walker for the deepest (leaf)
// page-table level.  DEPTH is this level's distance from the root.
template< typename _Last, typename PTE_PTR, int DEPTH = 0 >
enum { Max_depth = 0 };                 // no levels below this one
enum { Depth = DEPTH };

typedef typename _Last::Entry Entry;
typedef _Last Traits;

typedef Walk<_Last, PTE_PTR, DEPTH> This;
typedef Entry_vec<Level> Vec;           // NOTE(review): `Level` typedef declared outside this excerpt
// Invalidate all entries of this leaf table.
void clear(bool force_write_back)
{ _e.template clear<PTE_PTR>(Depth, force_write_back); }
// Leaf walk: there is no level left to descend, so simply return a
// typed pointer to the slot for `virt`.  The level, write-back, and
// allocator arguments are irrelevant here and ignored.
template< typename _Alloc >
PTE_PTR walk(Address virt, unsigned, bool, _Alloc const &)
{ return PTE_PTR(&_e[Vec::idx(virt)], Depth); }
173 void unmap(Address &start, unsigned long &size, unsigned, bool force_write_back)
175 unsigned idx = Vec::idx(start);
176 unsigned cnt = size >> Traits::Shift;
177 if (cnt + idx > Vec::Length)
178 cnt = Vec::Length - idx;
179 unsigned const e = idx + cnt;
181 for (unsigned i = idx; i != e; ++i)
182 PTE_PTR(&_e[i], Depth).clear();
184 if (force_write_back)
185 PTE_PTR::write_back(&_e[idx], &_e[e]);
187 start += (unsigned long)cnt << Traits::Shift;
188 size -= (unsigned long)cnt << Traits::Shift;
191 template< typename _Alloc >
192 void map(Address &phys, Address &virt, unsigned long &size,
193 unsigned long attr, unsigned, bool force_write_back,
196 unsigned idx = Vec::idx(virt);
197 unsigned cnt = size >> Traits::Shift;
198 if (cnt + idx > Vec::Length)
199 cnt = Vec::Length - idx;
200 unsigned const e = idx + cnt;
202 for (unsigned i = idx; i != e; ++i, phys += (1ULL << (Traits::Shift + Traits::Base_shift)))
203 PTE_PTR(&_e[i], Depth).set_page(phys, attr);
205 if (force_write_back)
206 PTE_PTR::write_back(&_e[idx], &_e[e]);
208 virt += (unsigned long)cnt << Traits::Shift;
209 size -= (unsigned long)cnt << Traits::Shift;
212 template< typename _Alloc >
213 void destroy(Address, Address, unsigned, unsigned, _Alloc const &)
// Leaf-level sync: copy valid entries from the remote entry vector `_r`
// into this one (used to mirror mappings, e.g. for the long IPC window).
// Several control-flow lines -- including the declaration of the local
// cursor `le`, presumably `Entry *le = &_e[l];` -- are not visible in
// this excerpt.
template< typename _Alloc >
int sync(Address &l_addr, This const &_r, Address &r_addr,
         Address &size, unsigned, bool force_write_back, _Alloc const &)
    unsigned count = size >> Traits::Shift;
    unsigned const l = Vec::idx(l_addr);
    unsigned const r = Vec::idx(r_addr);
    unsigned const m = l > r ? l : r;
    // clamp so neither the local nor the remote table is overrun
    if (m + count >= Vec::Length)
      count = Vec::Length - m;
    Entry const *re = &_r._e[r];
    bool need_flush = false;
    for (unsigned n = count; n > 0; --n)
      if (PTE_PTR(&le[n-1], Depth).is_valid())
        // This loop seems unnecessary, but remote_update is also used for
        // updating the long IPC window.
        // Now consider following scenario with super pages:
        // Sender A makes long IPC to receiver B.
        // A setups the IPC window by reading the pagedir slot from B in an
        // temporary register. Now the sender is preempted by C. Then C unmaps
        // the corresponding super page from B. C switch to A back, using
        // switch_to, which clears the IPC window pde slots from A. BUT then A
        // write the content of the temporary register, which contain the now
        // invalid pde slot, in his own page directory and starts the long IPC.
        // Because no pagefault will happen, A will write to now invalid memory.
        // So we compare after storing the pde slot, if the copy is still
        // valid. And this solution is much faster than grabbing the cpu lock,
        // when updating the ipc window.
        // Re-read the remote slot through a volatile view so a concurrent
        // unmap is observed after the copy was stored.
        typename Traits::Raw const volatile *rr
          = reinterpret_cast<typename Traits::Raw const *>(re + n - 1);
        le[n - 1] = *(Entry *)rr;
        if (EXPECT_TRUE(le[n - 1].raw() == *rr))
        le[n - 1] = re[n - 1];
    if (force_write_back)
      PTE_PTR::write_back(&le[0], &le[count]);
    // advance the cursors past the area handled at this level
    l_addr += (unsigned long)count << Traits::Shift;
    r_addr += (unsigned long)count << Traits::Shift;
    size -= (unsigned long)count << Traits::Shift;
// Walk<List<_Head,_Tail>, PTE_PTR, DEPTH>: recursive walker for a
// non-leaf level.  _Head describes this level, _Tail the levels below.
template< typename _Head, typename _Tail, typename PTE_PTR, int DEPTH >
class Walk <List <_Head,_Tail>, PTE_PTR, DEPTH >
typedef Walk<_Tail, PTE_PTR, DEPTH + 1> Next;   // walker for the next deeper level
typedef typename Next::Level Level;
typedef typename _Head::Entry Entry;
typedef _Head Traits;

enum { Max_depth = Next::Max_depth + 1 };       // levels below plus this one
enum { Depth = DEPTH };

typedef Walk<_Head, PTE_PTR, DEPTH> This;       // this level viewed as a leaf walker
typedef Walk< List< _Head, _Tail >, PTE_PTR, DEPTH> This2;
typedef Entry_vec<_Head> Vec;
// Allocate a next-level table, clear it, and attach it behind entry `e`.
// The allocation-failure return and the final `return n;` are not
// visible in this excerpt.
template< typename _Alloc >
Next *alloc_next(PTE_PTR e, _Alloc const &a, bool force_write_back)
    Next *n = (Next*)a.alloc(sizeof(Next));
    if (EXPECT_FALSE(!n))
    n->clear(force_write_back);          // fresh table starts out empty
    e.set_next_level(a.to_phys(n));      // link it into this level's entry
    e.write_back_if(force_write_back);
// Invalidate all entries of this level's table (does not recurse into
// or free sub-tables).
void clear(bool force_write_back)
{ _e.template clear<PTE_PTR>(Depth, force_write_back); }
// Walk down towards `level`, allocating missing intermediate tables via
// `alloc`.  Several branch lines (including the `level == 0` case and
// failure returns) are not visible in this excerpt.
template< typename _Alloc >
PTE_PTR walk(Address virt, unsigned level, bool force_write_back, _Alloc const &alloc)
    PTE_PTR e(&_e[Vec::idx(virt)], Depth);
    // entry not present: try to allocate the missing next-level table
    else if (!e.is_valid())
        if (alloc.valid() && (n = alloc_next(e, alloc, force_write_back)))
          return n->walk(virt, level - 1, force_write_back, alloc);
    // a leaf (superpage) entry cannot be descended into
    else if (e.is_leaf())
    // descend into the existing next-level table
    Next *n = (Next*)Mem_layout::phys_to_pmem(e.next_level());
    return n->walk(virt, level - 1, force_write_back, alloc);
// Unmap at this level: at level 0 treat this very table as the target
// (via the leaf implementation), otherwise descend into the sub-table.
// The surrounding branch lines are not visible in this excerpt.
void unmap(Address &start, unsigned long &size, unsigned level,
           bool force_write_back)
    // level == 0: reuse the leaf code on this table
    reinterpret_cast<This*>(this)->unmap(start, size, 0,
    PTE_PTR e(&_e[Vec::idx(start)], Depth);
    // nothing to descend into for empty or leaf (superpage) entries
    if (!e.is_valid() || e.is_leaf())
    Next *n = (Next*)Mem_layout::phys_to_pmem(e.next_level());
    n->unmap(start, size, level - 1, force_write_back);
// Map at this level: at level 0 install (super)pages right here via the
// leaf code; otherwise descend, allocating the next-level table when the
// entry is empty.  Branch/brace lines are not visible in this excerpt.
template< typename _Alloc >
void map(Address &phys, Address &virt, unsigned long &size,
         unsigned long attr, unsigned level, bool force_write_back,
    // level == 0: this table is the target level
    reinterpret_cast<This*>(this)->map(phys, virt, size, attr, 0,
                                       force_write_back, alloc);
    PTE_PTR e(&_e[Vec::idx(virt)], Depth);
    // entry empty: allocate the next-level table first
    if (alloc.valid() && (n = alloc_next(e, alloc, force_write_back)))
      n->map(phys, virt, size, attr, level - 1,
             force_write_back, alloc);
    // a leaf (superpage) entry blocks the descent
    if (_Head::May_be_leaf && e.is_leaf())
    n = (Next*)Mem_layout::phys_to_pmem(e.next_level());
    n->map(phys, virt, size, attr, level - 1, force_write_back, alloc);
// Recursively free the page-table structure covering [start, end] for
// levels within [start_level, end_level].  Only table memory is given
// back to `alloc`; the mappings themselves are not flushed here.
template< typename _Alloc >
void destroy(Address start, Address end,
             unsigned start_level, unsigned end_level,
    //printf("destroy: %*.s%lx-%lx lvl=%d:%d depth=%d\n", Depth*2, " ", start, end, start_level, end_level, Depth);
    // nothing we may free without an allocator or beyond end_level
    if (!alloc.valid() || Depth >= end_level)
    unsigned idx_start = Vec::idx(start);
    unsigned idx_end = Vec::idx(end) + 1;
    //printf("destroy: %*.sidx: %d:%d\n", Depth*2, " ", idx_start, idx_end);
    for (unsigned idx = idx_start; idx < idx_end; ++idx)
        PTE_PTR e(&_e[idx], Depth);
        // skip empty slots and leaf (superpage) entries -- no sub-table
        if (!e.is_valid() || (_Head::May_be_leaf && e.is_leaf()))
        Next *n = (Next*)Mem_layout::phys_to_pmem(e.next_level());
        if (Depth < end_level)
          // interior slots span their full range; only the border slots
          // are limited by start/end
          n->destroy(idx > idx_start ? 0 : start,
                     idx + 1 < idx_end ? (1UL << Traits::Shift)-1 : end,
                     start_level, end_level, alloc);
        if (Depth >= start_level)
            //printf("destroy: %*.sfree: %p: %p(%d)\n", Depth*2, " ", this, n, sizeof(Next));
            alloc.free(n, sizeof(Next));
// Synchronize entries from the remote table `_r` into this one,
// descending into sub-tables and allocating local ones where needed.
// Many control-flow lines (level-0 guard, loop braces, error paths,
// the final flush/return) are not visible in this excerpt.
template< typename _Alloc >
int sync(Address &l_a, This2 const &_r, Address &r_a,
         Address &size, unsigned level, bool force_write_back,
    // level == 0: sync the entries of this very table via the leaf code
    return reinterpret_cast<This*>(this)
      ->sync(l_a, reinterpret_cast<This const &>(_r), r_a, size, 0,
             force_write_back, alloc);
    unsigned count = size >> Traits::Shift;
    unsigned const lx = Vec::idx(l_a);
    unsigned const rx = Vec::idx(r_a);
    unsigned const mx = lx > rx ? lx : rx;
    // clamp to the shorter remainder of the two tables
    if (mx + count >= Vec::Length)
      count = Vec::Length - mx;
    bool need_flush = false;
    for (unsigned i = count; size && i > 0; --i) //while (size)
        PTE_PTR l(&_e[Vec::idx(l_a)], Depth);
        PTE_PTR r(const_cast<Entry *>(&_r._e[Vec::idx(r_a)]), Depth);
        // advance the cursors by one slot's span at this level
        l_a += 1UL << Traits::Shift;
        r_a += 1UL << Traits::Shift;
        if (size > 1UL << Traits::Shift)
          size -= 1UL << Traits::Shift;
        // need a local next-level table to sync into
        if (!alloc.valid() || !(n = alloc_next(l, alloc, force_write_back)))
        n = (Next*)Mem_layout::phys_to_pmem(l.next_level());
        Next *rn = (Next*)Mem_layout::phys_to_pmem(r.next_level());
        int err = n->sync(l_a, *rn, r_a, size, level - 1, force_write_back, alloc);
// Pte_ptr<_E, PT>: typed pointer to a single page-table entry together
// with the level it belongs to.  The class head and the members `e`/`l`
// are declared outside this excerpt.
template< typename _E, typename PT >
Pte_ptr(_E *e, unsigned char level) : e(e), l(level) {}

// Converting copy from a compatible Pte_ptr instantiation.
template< typename _I2 >
Pte_ptr(_I2 const &o) : e(o.e), l(o.l) {}

// Level of the table this entry lives in.
unsigned char level() const { return l; }

// log2 of the page size mapped by an entry at this level.
unsigned page_order() const
{ return PT::page_order_for_level(l); }
// Tail of the per-level Traits template (head lines outside this
// excerpt): the entry type plus compile-time level parameters.
unsigned _Base_shift = 0                 // extra shift applied below the table shifts
typedef _Entry Entry;
May_be_leaf = _May_be_leaf,              // level may hold (super)page leaf entries
Base_shift = _Base_shift
// Shift<_T, _Shift>: metafunction rebasing a traits type by _Shift bits
// (folded into Base_shift); keeps the unshifted input as Orig_list.
// Surrounding brace/typedef lines are not visible in this excerpt.
template< typename _T, unsigned _Shift >
typedef _T Orig_list;
< typename _T::Entry,
  _T::Base_shift + _Shift

// List case: shift every level and rebuild the list structure.
template< typename _Head, typename _Tail, unsigned _Shift >
struct Shift< List<_Head, _Tail>, _Shift >
typedef Ptab::List<_Head, _Tail> Orig_list;
typename Shift<_Head, _Shift>::List,
typename Shift<_Tail, _Shift>::List
// Identity address wrapper: virtual addresses are plain Address values.
typedef Address Value_type;
static Address val(Address a) { return a; }
// Page_addr_wrap<N, SHIFT>: address wrapper for strongly-typed page
// numbers; unwraps N (and its difference type) to raw integral values.
template< typename N, int SHIFT >
struct Page_addr_wrap
enum { Shift = SHIFT };                  // page-number-to-address shift
typedef N Value_type;
static typename N::Value val(N a)
{ return N::val(a); }

// Unwrap a size/offset of the wrapped type.
static typename Value_type::Diff_type::Value
val(typename Value_type::Diff_type a)
{ return cxx::int_value<typename Value_type::Diff_type>(a); }
// Base: public page-table front end.  _Addr translates typed virtual
// addresses into raw Address values.  The class head is only partially
// visible in this excerpt.
typename _Addr = Address_wrap
typedef typename _Addr::Value_type Va;              // typed virtual address
typedef typename _Addr::Value_type::Diff_type Vs;   // typed size/offset
typedef _Traits Traits;
typedef PTE_PTR Pte_ptr;
typedef typename _Traits::Head L0;                  // traits of the root level
Base_shift = L0::Base_shift,
typedef Ptab::Walk<_Traits, PTE_PTR> Walk;
enum { Depth = Walk::Max_depth };                   // depth of the deepest level
typedef Level<Traits, Depth> Levels;
// Least significant VA bit covered by an entry at `level`.
static unsigned lsb_for_level(unsigned level)
{ return Levels::shift(level); }

// log2 of the page size mapped at `level`, including Base_shift.
static unsigned page_order_for_level(unsigned level)
{ return Levels::shift(level) + Base_shift; }
// Walk to `level`, allocating missing page tables from `alloc` and
// performing cache write-back if requested.
template< typename _Alloc >
PTE_PTR walk(Va virt, unsigned level, bool force_write_back, _Alloc const &alloc)
{ return _base.walk(_Addr::val(virt), level, force_write_back, alloc); }
// Read-only walk: uses the Null_alloc stub, so it never allocates and
// never writes back; stops at the first missing/leaf entry accordingly.
PTE_PTR walk(Va virt, unsigned level = Depth) const
{ return const_cast<Walk&>(_base).walk(_Addr::val(virt), level, false, Null_alloc()); }
// Copy entries from the remote page table *_r into this one down to
// `level`.  The tail of the call (allocator argument, closing braces)
// is not visible in this excerpt.
template< typename OPTE_PTR, typename _Alloc >
int sync(Va l_addr, Base< OPTE_PTR, _Traits, _Addr> const *_r,
         Va r_addr, Vs size, unsigned level = Depth,
         bool force_write_back = false,
         _Alloc const &alloc = _Alloc())
    Address la = _Addr::val(l_addr);
    Address ra = _Addr::val(r_addr);
    Address sz = _Addr::val(size);
    return _base.sync(la, _r->_base,
                      ra, sz, level, force_write_back,
// Invalidate all root-level entries of the page table.
void clear(bool force_write_back)
{ _base.clear(force_write_back); }
628 void unmap(Va virt, Vs size, unsigned level, bool force_write_back)
630 Address va = _Addr::val(virt);
631 unsigned long sz = _Addr::val(size);
632 _base.unmap(va, sz, level, force_write_back);
635 template< typename _Alloc >
636 void map(Address phys, Va virt, Vs size, unsigned long attr,
637 unsigned level, bool force_write_back,
638 _Alloc const &alloc = _Alloc())
640 Address va = _Addr::val(virt);
641 unsigned long sz = _Addr::val(size);
642 _base.map(phys, va, sz, attr, level, force_write_back, alloc);
645 template< typename _Alloc >
646 void destroy(Va start, Va end, unsigned start_level, unsigned end_level,
647 _Alloc const &alloc = _Alloc())
649 _base.destroy(_Addr::val(start), _Addr::val(end),
650 start_level, end_level, alloc);
// Reinterpret this page table as one parameterized with a different
// allocator type.  NOTE(review): relies on all such Base instantiations
// being layout-identical; the template arguments here do not match the
// parameter list visible elsewhere in this excerpt -- confirm.
template< typename _New_alloc >
Base<_Base_entry, _Traits, _New_alloc, _Addr> *alloc_cast()
{ return reinterpret_cast<Base<_Base_entry, _Traits, _New_alloc, _Addr> *>(this); }
// Const variant of alloc_cast(): same layout-compatibility caveat.
template< typename _New_alloc >
Base<_Base_entry, _Traits, _New_alloc, _Addr> const *alloc_cast() const
{ return reinterpret_cast<Base<_Base_entry, _Traits, _New_alloc, _Addr> const *>(this); }