// Fiasco "preprocess" extension: adds IA32/ARM-specific members to the
// Generic_obj_space class declared in the generic part of the file.
EXTENSION class Generic_obj_space
// Do not use the virtually mapped cap table in
// v_lookup and v_insert, because the map logic needs the kernel
// address for link pointers in the map-nodes and these addresses must
// be valid in all address spaces.
enum { Optimize_local = 0 };
24 #include "mapped_alloc.h"
25 #include "mem_layout.h"
PRIVATE template< typename SPACE >
// Accessor for the memory space backing this object space; delegates to
// the SPACE policy class to find the enclosing space object.
Generic_obj_space<SPACE>::mem_space() const
{ return SPACE::space(this)->mem_space(); }
33 PRIVATE template< typename SPACE >
34 static inline NEEDS["mem_layout.h"]
35 typename Generic_obj_space<SPACE>::Entry *
36 Generic_obj_space<SPACE>::cap_virt(Address index)
37 { return reinterpret_cast<Entry*>(Mem_layout::Caps_start) + index; }
PRIVATE template< typename SPACE >
inline NEEDS["mem_space.h", "mem_layout.h", Generic_obj_space::cap_virt]
typename Generic_obj_space<SPACE>::Entry *
Generic_obj_space<SPACE>::alien_lookup(Address index)
  // Translate the slot's address in the virtual cap window through this
  // space's page tables; ~0UL means the cap page is not mapped (yet).
  Address phys = Address(mem_space()->virt_to_phys((Address)cap_virt(index)));
  if (EXPECT_FALSE(phys == ~0UL))
  // Convert the physical address into the kernel's pmem window so the
  // entry is dereferenceable from any address space (see Optimize_local).
  return reinterpret_cast<Entry*>(Mem_layout::phys_to_pmem(phys));
51 PRIVATE template< typename SPACE >
52 typename Generic_obj_space<SPACE>::Entry *
53 Generic_obj_space<SPACE>::get_cap(Address index)
54 { return alien_lookup(index); }
PUBLIC template< typename SPACE >
inline NEEDS["mem_space.h"]
// The quota object charged for cap-table pages; delegated to the backing
// memory space.
Generic_obj_space<SPACE>::ram_quota() const
{ return mem_space()->ram_quota(); }
PRIVATE template< typename SPACE >
/*inline NEEDS["mapped_alloc.h", <cstring>, "ram_quota.h",
               Generic_obj_space::cap_virt]*/
typename Generic_obj_space<SPACE>::Entry *
Generic_obj_space<SPACE>::caps_alloc(Address virt)
  // Address of the requested slot inside the virtual cap window.
  Address cv = (Address)cap_virt(virt);
  // One page of quota-accounted kernel memory to back a cap-table page.
  void *mem = Mapped_allocator::allocator()->q_unaligned_alloc(ram_quota(), Config::PAGE_SIZE);
  add_dbg_info(mem, this, virt);
  // A fresh cap page must read as all-invalid capabilities.
  Mem::memset_mwords(mem, 0, Config::PAGE_SIZE / sizeof(Mword));
  // Map the page into this space's cap window, page-aligned at 'cv'.
  s = mem_space()->v_insert(
      Mem_space::Phys_addr::create(Mem_space::kernel_space()->virt_to_phys((Address)mem)),
      Mem_space::Addr::create(cv).trunc(Mem_space::Size::create(Config::PAGE_SIZE)),
      Mem_space::Size::create(Config::PAGE_SIZE),
      Mem_space::Page_cacheable | Mem_space::Page_writable
      | Mem_space::Page_referenced | Mem_space::Page_dirty);
  case Insert_warn_exists:
  case Insert_warn_attrib_upgrade:
  case Insert_err_exists:
  case Insert_err_nomem:
    // Mapping did not take effect as a fresh page: return the memory
    // (and its quota charge) to the allocator.
    Mapped_allocator::allocator()->q_unaligned_free(ram_quota(),
                                                    Config::PAGE_SIZE, mem);
  // Kernel address of the slot: page base OR'ed with the in-page offset.
  // NOTE: '&' binds tighter than '|', so this parses as
  // (cv & (PAGE_SIZE-1)) | mem — the intended combination.
  unsigned long cap = cv & (Config::PAGE_SIZE - 1) | (unsigned long)mem;
  return reinterpret_cast<Entry*>(cap);
PRIVATE template< typename SPACE >
Generic_obj_space<SPACE>::caps_free()
  Mapped_allocator *a = Mapped_allocator::allocator();
  // Walk the whole index range and release every backing cap page found.
  for (unsigned long i = 0; i < map_max_address().value();
    Entry *c = get_cap(i);
    // Translate the slot back to a kernel (pmem) address before freeing;
    // it must be mapped if get_cap returned it.
    Address cp = Address(mem_space()->virt_to_phys(Address(c)));
    assert_kdb (cp != ~0UL);
    void *cv = (void*)Mem_layout::phys_to_pmem(cp);
    a->q_unaligned_free(ram_quota(), Config::PAGE_SIZE, cv);
#if defined (CONFIG_ARM)
  // ARM: tear down the page tables covering the cap window directly.
  mem_space()->dir()->free_page_tables((void*)Mem_layout::Caps_start, (void*)Mem_layout::Caps_end);
  // Other architectures: destroy the pdir range with quota-aware cleanup.
  mem_space()->dir()->Pdir::alloc_cast<Mem_space_q_alloc>()
    ->destroy(Virt_addr(Mem_layout::Caps_start),
              Virt_addr(Mem_layout::Caps_end), Pdir::Depth - 1,
              Mem_space_q_alloc(ram_quota(), Mapped_allocator::allocator()));
135 // Utilities for map<Generic_obj_space> and unmap<Generic_obj_space>
PUBLIC template< typename SPACE >
inline NEEDS[Generic_obj_space::cap_virt, Generic_obj_space::get_cap]
Generic_obj_space<SPACE>::v_lookup(Addr const &virt, Phys_addr *phys = 0,
                                   Size *size = 0, unsigned *attribs = 0)
  // Object mappings are single cap slots by default.
  if (size) size->set_value(1);
  // Fast path: this space is the current one, so the virtual cap window
  // can be read directly.
  && mem_space() == Mem_space::current_mem_space(current_cpu()))
    cap = cap_virt(virt.value());
    cap = get_cap(virt.value());
  if (EXPECT_FALSE(!cap))
    // No backing page: report a whole unmapped cap page at once so the
    // caller can skip it.
    if (size) size->set_value(Caps_per_page);
  // Fault-safe read — the cap page may legitimately be unmapped.
  Capability c = Mem_layout::read_special_safe((Capability*)cap);
  if (phys) *phys = c.obj();
  if (c.valid() && attribs) *attribs = c.rights();
  Obj::set_entry(virt, cap);
  if (phys) *phys = cap->obj();
  if (cap->valid() && attribs) *attribs = cap->rights();
PUBLIC template< typename SPACE >
inline NEEDS [Generic_obj_space::cap_virt, Generic_obj_space::get_cap]
typename Generic_obj_space<SPACE>::Capability
Generic_obj_space<SPACE>::lookup(Address virt)
  // Clamp the index to the size of the object space.
  virt &= ~(~0UL << Whole_space);
  // Current space: the virtual cap window is directly readable.
  if (mem_space() == Mem_space::current_mem_space(current_cpu()))
    c = reinterpret_cast<Capability*>(cap_virt(virt));
  if (EXPECT_FALSE(!c))
    return Capability(0); // void
  // Fault-safe read in case the cap page is not mapped.
  return Mem_layout::read_special_safe(c);
PUBLIC template< typename SPACE >
inline NEEDS [Generic_obj_space::cap_virt]
Generic_obj_space<SPACE>::lookup_local(Address virt, unsigned char *rights = 0)
  // Clamp the index to the size of the object space.
  virt &= ~(~0UL << Whole_space);
  // Local variant: the caller's space is current, so the virtual cap
  // window is used unconditionally.
  Capability *c = reinterpret_cast<Capability*>(cap_virt(virt));
  Capability cap = Mem_layout::read_special_safe(c);
  if (rights) *rights = cap.rights();
PUBLIC template< typename SPACE >
inline NEEDS[<cassert>, Generic_obj_space::cap_virt, Generic_obj_space::get_cap]
Generic_obj_space<SPACE>::v_delete(Page_number virt, Size size,
                                   unsigned long page_attribs = L4_fpage::CRWSD)
  // Object mappings are always exactly one cap slot.
  assert (size.value() == 1);
  // Fast path: current space, read via the virtual cap window.
  && mem_space() == Mem_space::current_mem_space(current_cpu()))
    c = cap_virt(virt.value());
  // Fault-safe probe of the slot.
  Capability cap = Mem_layout::read_special_safe((Capability*)c);
    c = get_cap(virt.value());
  // R right requested: full removal path; otherwise only strip the
  // requested subset of rights from the entry.
  if (page_attribs & L4_fpage::R)
    c->del_rights(page_attribs & L4_fpage::CWSD);
PUBLIC template< typename SPACE >
inline NEEDS[Generic_obj_space::cap_virt, Generic_obj_space::caps_alloc,
             Generic_obj_space::alien_lookup, "kdb_ke.h"]
typename Generic_obj_space<SPACE>::Status
Generic_obj_space<SPACE>::v_insert(Phys_addr phys, Addr const &virt, Size size,
                                   unsigned char page_attribs)
  // Object mappings are always exactly one cap slot.
  assert (size.value() == 1);
  // Fast path: current space — use the virtual cap window and allocate
  // the backing cap page on demand.
  && mem_space() == Mem_space::current_mem_space(current_cpu()))
    c = cap_virt(virt.value());
      return Insert_err_nomem;
    // Probe the slot fault-safely; on failure the cap page is unmapped,
    // so try to allocate backing store for it.
    if (!Mem_layout::read_special_safe((Capability*)c, cap)
        && !caps_alloc(virt.value()))
      return Insert_err_nomem;
    // Foreign space: resolve through the physical lookup path, allocating
    // the cap page if it does not exist yet.
    c = alien_lookup(virt.value());
    if (!c && !(c = caps_alloc(virt.value())))
      return Insert_err_nomem;
    Obj::set_entry(virt, c);
  // Same object already mapped at this slot: either nothing new
  // (warn) or an attribute upgrade.
  if (c->obj() == phys)
    if (EXPECT_FALSE(c->rights() == page_attribs))
      return Insert_warn_exists;
    c->add_rights(page_attribs);
    return Insert_warn_attrib_upgrade;
  return Insert_err_exists;
  // Slot free: install the new capability with the requested rights.
  c->set(phys, page_attribs);
294 PUBLIC template< typename SPACE >
296 typename Generic_obj_space<SPACE>::Addr
297 Generic_obj_space<SPACE>::map_max_address()
301 r = (Mem_layout::Caps_end - Mem_layout::Caps_start) / sizeof(Entry);
302 if (Map_max_address < r)