// Physical-memory flavor of the generic object (capability) space.
EXTENSION class Generic_obj_space
// Do not use the virtually mapped cap table in v_lookup and v_insert:
// the map logic needs the kernel address for link pointers in the
// map-nodes, and these addresses must be valid in all address spaces.
enum { Optimize_local = 0 };
24 #include "mapped_alloc.h"
25 #include "mem_layout.h"
PRIVATE template< typename SPACE >
// Return the Mem_space backing this object space; SPACE::space() maps
// from the embedded Generic_obj_space back to its owning space object.
// (The return-type line is elided in this view.)
Generic_obj_space<SPACE>::mem_space() const
{ return SPACE::space(this)->mem_space(); }
33 PRIVATE template< typename SPACE >
34 static inline NEEDS["mem_layout.h"]
35 typename Generic_obj_space<SPACE>::Entry *
36 Generic_obj_space<SPACE>::cap_virt(Address index)
37 { return reinterpret_cast<Entry*>(Mem_layout::Caps_start) + index; }
PRIVATE template< typename SPACE >
inline NEEDS["mem_space.h", "mem_layout.h", Generic_obj_space::cap_virt]
typename Generic_obj_space<SPACE>::Entry *
// Resolve cap slot `index` of a potentially foreign space: translate
// the slot's per-space virtual address to a physical address through
// the space's page tables, then to a kernel (pmem) pointer that is
// valid in every address space.  (Braces and the failure return are
// elided in this view.)
Generic_obj_space<SPACE>::alien_lookup(Address index)
Mem_space *ms = mem_space();
Address phys = Address(ms->virt_to_phys((Address)cap_virt(index)));
if (EXPECT_FALSE(phys == ~0UL))  // no cap-table page mapped at this slot (failure body elided)
return reinterpret_cast<Entry*>(Mem_layout::phys_to_pmem(phys));
53 PRIVATE template< typename SPACE >
54 typename Generic_obj_space<SPACE>::Entry *
55 Generic_obj_space<SPACE>::get_cap(Address index)
56 { return alien_lookup(index); }
PUBLIC template< typename SPACE >
inline NEEDS["mem_space.h"]
// RAM quota charged for cap-table pages; delegated to the Mem_space.
// (The return-type line is elided in this view.)
Generic_obj_space<SPACE>::ram_quota() const
{ return mem_space()->ram_quota(); }
PRIVATE template< typename SPACE >
/*inline NEEDS["mapped_alloc.h", <cstring>, "ram_quota.h",
Generic_obj_space::cap_virt]*/
typename Generic_obj_space<SPACE>::Entry *
// Allocate, zero and map one page of cap-table entries covering slot
// `virt`, charged against this space's RAM quota; returns the kernel
// (allocator-side) pointer to the entry for `virt`.  Error-handling
// guards and the switch header are elided in this view.
Generic_obj_space<SPACE>::caps_alloc(Address virt)
Address cv = (Address)cap_virt(virt);  // per-space virtual address of the slot
void *mem = Mapped_allocator::allocator()->q_unaligned_alloc(ram_quota(), Config::PAGE_SIZE);
// (allocation-failure check elided in this view)
add_dbg_info(mem, this, virt);
// Fresh entries must read as invalid capabilities.
Mem::memset_mwords(mem, 0, Config::PAGE_SIZE / sizeof(Mword));
// Map the page at the page-aligned slot address, writable and
// pre-marked referenced/dirty so the cap table takes no access/dirty
// faults later on.
s = mem_space()->v_insert(
Mem_space::Phys_addr::create(Mem_space::kernel_space()->virt_to_phys((Address)mem)),
Mem_space::Addr::create(cv).trunc(Mem_space::Size::create(Config::PAGE_SIZE)),
Mem_space::Size::create(Config::PAGE_SIZE),
Mem_space::Page_cacheable | Mem_space::Page_writable
| Mem_space::Page_referenced | Mem_space::Page_dirty);
// (switch on the insert status elided in this view)
case Insert_warn_exists:
case Insert_warn_attrib_upgrade:
case Insert_err_exists:
// presumably unreachable for the kernel-private caps window — handling elided
case Insert_err_nomem:
// Out of page-table memory: roll the page allocation back.
Mapped_allocator::allocator()->q_unaligned_free(ram_quota(),
Config::PAGE_SIZE, mem);
// Combine the slot's offset within its page with the kernel page base.
// NOTE: `&` binds tighter than `|`, so this parses as intended:
// (cv & (PAGE_SIZE - 1)) | (unsigned long)mem.
unsigned long cap = cv & (Config::PAGE_SIZE - 1) | (unsigned long)mem;
return reinterpret_cast<Entry*>(cap);
PRIVATE template< typename SPACE >
// Tear down the capability table: return every mapped cap-table page
// to the quota-charged allocator, then free the page-table levels that
// back the caps window itself.  (Loop increment, empty-slot skip and
// the #else marker are elided in this view.)
Generic_obj_space<SPACE>::caps_free()
Mem_space *ms = mem_space();
if (EXPECT_FALSE(!ms || !ms->dir()))  // nothing to free (early-out body elided)
Mapped_allocator *a = Mapped_allocator::allocator();
// Walk the slots — presumably advancing page-wise; the increment
// expression is elided in this view.
for (unsigned long i = 0; i < map_max_address().value();
Entry *c = get_cap(i);
// Compute the page's physical address and its allocator-visible pmem
// alias before handing the whole page back.
Address cp = Address(ms->virt_to_phys(Address(c)));
assert_kdb (cp != ~0UL);
void *cv = (void*)Mem_layout::phys_to_pmem(cp);
a->q_unaligned_free(ram_quota(), Config::PAGE_SIZE, cv);
#if defined (CONFIG_ARM)
ms->dir()->free_page_tables((void*)Mem_layout::Caps_start, (void*)Mem_layout::Caps_end);
// (#else marker elided) generic path: destroy the paging structures
// covering the caps window, charged against this space's quota.
ms->dir()->Pdir::alloc_cast<Mem_space_q_alloc>()
->destroy(Virt_addr(Mem_layout::Caps_start),
Virt_addr(Mem_layout::Caps_end), Pdir::Depth - 1,
Mem_space_q_alloc(ram_quota(), Mapped_allocator::allocator()));
141 // Utilities for map<Generic_obj_space> and unmap<Generic_obj_space>
PUBLIC template< typename SPACE >
inline NEEDS[Generic_obj_space::cap_virt, Generic_obj_space::get_cap]
// Lookup for the mapping logic: report the entry address, stored
// object, rights and coverage size for capability slot `virt`.
// (Return type, condition start and braces are elided in this view.)
Generic_obj_space<SPACE>::v_lookup(Addr const &virt, Phys_addr *phys = 0,
Size *size = 0, unsigned *attribs = 0)
if (size) size->set_value(1);  // a populated slot covers one cap
// Fast path (condition start elided): only in the current space may
// the virtually mapped cap window be dereferenced directly.
&& mem_space() == Mem_space::current_mem_space(current_cpu()))
cap = cap_virt(virt.value());
// Slow path: resolve the slot through the page tables.
cap = get_cap(virt.value());
if (EXPECT_FALSE(!cap))
// No cap-table page here: the hole spans a whole page of slots.
if (size) size->set_value(Caps_per_page);
// Window access may fault on an unmapped page; use the safe read.
Capability c = Mem_layout::read_special_safe((Capability*)cap);
if (phys) *phys = c.obj();
if (c.valid() && attribs) *attribs = c.rights();
// pmem path: the entry pointer is directly dereferenceable.
Obj::set_entry(virt, cap);
if (phys) *phys = cap->obj();
if (cap->valid() && attribs) *attribs = cap->rights();
PUBLIC template< typename SPACE >
inline NEEDS [Generic_obj_space::cap_virt, Generic_obj_space::get_cap]
typename Generic_obj_space<SPACE>::Capability
// Read the capability stored in slot `virt`; yields the void
// capability when the slot's table page is not present.  (The
// foreign-space branch resolving via get_cap is elided in this view.)
Generic_obj_space<SPACE>::lookup(Address virt)
virt &= ~(~0UL << Whole_space);  // mask the index to the space's width
if (mem_space() == Mem_space::current_mem_space(current_cpu()))
c = reinterpret_cast<Capability*>(cap_virt(virt));
if (EXPECT_FALSE(!c))
return Capability(0); // void
// presumably fault-tolerant read of the (possibly unmapped) window
// page — see Mem_layout::read_special_safe.
return Mem_layout::read_special_safe(c);
PUBLIC template< typename SPACE >
inline NEEDS [Generic_obj_space::cap_virt]
// Fast capability read valid only for the current space: always goes
// through the virtually mapped cap window.  Optionally reports the
// rights bits.  (Return type, braces and final return are elided.)
Generic_obj_space<SPACE>::lookup_local(Address virt, unsigned char *rights = 0)
virt &= ~(~0UL << Whole_space);  // mask the index to the space's width
Capability *c = reinterpret_cast<Capability*>(cap_virt(virt));
Capability cap = Mem_layout::read_special_safe(c);  // tolerates an unmapped page
if (rights) *rights = cap.rights();
PUBLIC template< typename SPACE >
inline NEEDS[<cassert>, Generic_obj_space::cap_virt, Generic_obj_space::get_cap]
// Revoke rights on a single capability slot.  With L4_fpage::R among
// `page_attribs` the entry is removed entirely (that branch body is
// elided); otherwise only the given rights bits are stripped.
Generic_obj_space<SPACE>::v_delete(Page_number virt, Size size,
unsigned long page_attribs = L4_fpage::CRWSD)
assert (size.value() == 1);  // object mappings always cover one slot
// Local fast path (condition start elided in this view):
&& mem_space() == Mem_space::current_mem_space(current_cpu()))
c = cap_virt(virt.value());
// Probe the window safely; an unmapped page means nothing to delete.
Capability cap = Mem_layout::read_special_safe((Capability*)c);
// Foreign space: resolve the slot through the page tables.
c = get_cap(virt.value());
if (page_attribs & L4_fpage::R)
// (full-removal branch elided in this view)
c->del_rights(page_attribs & L4_fpage::CWSD);
PUBLIC template< typename SPACE >
inline NEEDS[Generic_obj_space::cap_virt, Generic_obj_space::caps_alloc,
Generic_obj_space::alien_lookup, "kdb_ke.h"]
typename Generic_obj_space<SPACE>::Status
// Install capability `phys` with `page_attribs` rights into slot
// `virt`, allocating the backing cap-table page on demand.  Status
// codes mirror a page-table insert (exists / attrib upgrade / nomem).
// Guards, braces and else-branches are elided in this view.
Generic_obj_space<SPACE>::v_insert(Phys_addr phys, Addr const &virt, Size size,
unsigned char page_attribs)
assert (size.value() == 1);  // object mappings always cover one slot
// Local fast path (condition start elided in this view):
&& mem_space() == Mem_space::current_mem_space(current_cpu()))
c = cap_virt(virt.value());
// (guard elided) no entry address could be formed:
return Insert_err_nomem;
// Probe the window; if the table page is missing, allocate it.
if (!Mem_layout::read_special_safe((Capability*)c, cap)
&& !caps_alloc(virt.value()))
return Insert_err_nomem;
// Foreign space: resolve through page tables, allocating on demand.
c = alien_lookup(virt.value());
if (!c && !(c = caps_alloc(virt.value())))
return Insert_err_nomem;
Obj::set_entry(virt, c);
// Slot already holds this very object: identical rights is a
// "warn exists", otherwise merge the rights as an attrib upgrade.
if (c->obj() == phys)
if (EXPECT_FALSE(c->rights() == page_attribs))
return Insert_warn_exists;
c->add_rights(page_attribs);
return Insert_warn_attrib_upgrade;
// A different object occupies the slot:
return Insert_err_exists;
// Empty slot: store the new capability with its rights.
c->set(phys, page_attribs);
300 PUBLIC template< typename SPACE >
302 typename Generic_obj_space<SPACE>::Addr
303 Generic_obj_space<SPACE>::map_max_address()
307 r = (Mem_layout::Caps_end - Mem_layout::Caps_start) / sizeof(Entry);
308 if (Map_max_address < r)