// Source: kernel/fiasco/src/kern/obj_space-virt.cpp (l4.git, rtime.felk.cvut.cz gitweb export)
1 INTERFACE:
2
3 #include "mem_space.h"
4 #include "ram_quota.h"
5
// Virtual-cap-table variant of Generic_obj_space: the capability table
// lives in a per-task virtual window (Mem_layout::Caps_start..Caps_end)
// and is accessed through its kernel (pmem) alias, never through the
// task-local mapping, because Optimize_local is 0 (see below).
6 EXTENSION class Generic_obj_space
7 {
8   // do not use the virtually mapped cap table in
9   // v_lookup and v_insert, because the map logic needs the kernel
10   // address for link pointers in the map-nodes and these addresses must
11   // be valid in all address spaces.
12   enum { Optimize_local = 0 };
13 };
14
15 IMPLEMENTATION:
16
17 #include <cstring>
18 #include <cassert>
19
20 #include "atomic.h"
21 #include "config.h"
22 #include "cpu.h"
23 #include "kdb_ke.h"
24 #include "mapped_alloc.h"
25 #include "mem_layout.h"
26
27
// Return the Mem_space of the space owning this object space
// (SPACE::space() maps the obj-space back to its enclosing space).
28 PRIVATE template< typename SPACE >
29 Mem_space *
30 Generic_obj_space<SPACE>::mem_space() const
31 { return SPACE::space(this)->mem_space(); }
32
// Virtual address of capability slot `index` inside the per-task cap
// window starting at Mem_layout::Caps_start.  This is only a window
// address -- the backing page need not be mapped yet.
33 PRIVATE  template< typename SPACE >
34 static inline NEEDS["mem_layout.h"]
35 typename Generic_obj_space<SPACE>::Entry *
36 Generic_obj_space<SPACE>::cap_virt(Address index)
37 { return reinterpret_cast<Entry*>(Mem_layout::Caps_start) + index; }
38
// Look up capability slot `index` of a possibly foreign space.
// Translates the cap-window address through the target space's page
// table; returns 0 if no cap page is mapped at the slot, otherwise the
// globally valid kernel (pmem) alias of the entry.
39 PRIVATE  template< typename SPACE >
40 inline NEEDS["mem_space.h", "mem_layout.h", Generic_obj_space::cap_virt]
41 typename Generic_obj_space<SPACE>::Entry *
42 Generic_obj_space<SPACE>::alien_lookup(Address index)
43 {
44   Mem_space *ms = mem_space();
45
46   Address phys = Address(ms->virt_to_phys((Address)cap_virt(index)));
47   if (EXPECT_FALSE(phys == ~0UL))   // ~0UL: translation failed, no cap page mapped here
48     return 0;
49
50   return reinterpret_cast<Entry*>(Mem_layout::phys_to_pmem(phys));
51 }
52
// Entry accessor used by v_lookup/v_delete: with Optimize_local == 0
// this always resolves through the kernel alias (alien_lookup), even
// for the current space.
53 PRIVATE template< typename SPACE >
54 typename Generic_obj_space<SPACE>::Entry *
55 Generic_obj_space<SPACE>::get_cap(Address index)
56 { return alien_lookup(index); }
57
// RAM quota object that cap-table pages are charged against
// (delegated to the owning Mem_space).
58 PUBLIC  template< typename SPACE >
59 inline NEEDS["mem_space.h"]
60 Ram_quota *
61 Generic_obj_space<SPACE>::ram_quota() const
62 { return mem_space()->ram_quota(); }
63
64
// Allocate and map one page of capability entries covering slot `virt`.
// The page is charged against the task's RAM quota, zero-filled (all
// caps invalid), and mapped into the task's cap window.  Returns the
// kernel address of the Entry for `virt`, or 0 on allocation/mapping
// failure.
65 PRIVATE  template< typename SPACE >
66 /*inline NEEDS["mapped_alloc.h", <cstring>, "ram_quota.h",
67                      Generic_obj_space::cap_virt]*/
68 typename Generic_obj_space<SPACE>::Entry *
69 Generic_obj_space<SPACE>::caps_alloc(Address virt)
70 {
71   Address cv = (Address)cap_virt(virt);   // window address of the slot
72   void *mem = Mapped_allocator::allocator()->q_unaligned_alloc(ram_quota(), Config::PAGE_SIZE);
73
74   if (!mem)
75     return 0;
76
77   add_dbg_info(mem, this, virt);
78
     // start with an empty page: all capability entries invalid
79   Mem::memset_mwords(mem, 0, Config::PAGE_SIZE / sizeof(Mword));
80
81   Mem_space::Status s;
     // map the new page into the cap window, page-aligned at cv
82   s = mem_space()->v_insert(
83       Mem_space::Phys_addr::create(Mem_space::kernel_space()->virt_to_phys((Address)mem)),
84       Mem_space::Addr::create(cv).trunc(Mem_space::Size::create(Config::PAGE_SIZE)),
85       Mem_space::Size::create(Config::PAGE_SIZE),
86       Mem_space::Page_cacheable | Mem_space::Page_writable
87       | Mem_space::Page_referenced | Mem_space::Page_dirty);
88
89   switch (s)
90     {
91     case Insert_ok:
92     case Insert_warn_exists:
93     case Insert_warn_attrib_upgrade:
     // NOTE(review): on Insert_err_exists the fresh page is neither the
     // one mapped at cv nor freed, so it leaks and the returned pointer
     // references an unmapped page -- presumably unreachable because
     // callers invoke caps_alloc() only after a failed lookup; confirm.
94     case Insert_err_exists:
95       break;
96     case Insert_err_nomem:
97       Mapped_allocator::allocator()->q_unaligned_free(ram_quota(),
98           Config::PAGE_SIZE, mem);
99       return 0;
100     };
101
      // Kernel address of the entry: page offset of cv combined with the
      // kernel address of the new page.  '&' binds tighter than '|', so
      // this reads (cv & (PAGE_SIZE-1)) | mem; '|' acts as '+' assuming
      // mem is page-aligned -- TODO confirm q_unaligned_alloc guarantees
      // page alignment for PAGE_SIZE-sized requests.
102   unsigned long cap = cv & (Config::PAGE_SIZE - 1) | (unsigned long)mem;
103
104   return reinterpret_cast<Entry*>(cap);
105 }
106
// Tear down the whole capability table: return every mapped cap page
// to the allocator (refunding the quota), then release the page tables
// backing the cap window.
107 PRIVATE template< typename SPACE >
108 void
109 Generic_obj_space<SPACE>::caps_free()
110 {
111   Mem_space *ms = mem_space();
112   if (EXPECT_FALSE(!ms || !ms->dir()))
113     return;
114
115   Mapped_allocator *a = Mapped_allocator::allocator();
      // probe one slot per cap page: Caps_per_page entries share a page
116   for (unsigned long i = 0; i < map_max_address().value();
117        i += Caps_per_page)
118     {
119       Entry *c = get_cap(i);
120       if (!c)
121         continue;
122
          // NOTE(review): c is already a pmem alias (from alien_lookup);
          // re-translating it through the task's dir presumably works
          // because the kernel region is present in every dir -- confirm.
123       Address cp = Address(ms->virt_to_phys(Address(c)));
124       assert_kdb (cp != ~0UL);
125       void *cv = (void*)Mem_layout::phys_to_pmem(cp);
126       remove_dbg_info(cv);
127
128       a->q_unaligned_free(ram_quota(), Config::PAGE_SIZE, cv);
129     }
130 #if defined (CONFIG_ARM)
131   ms->dir()->free_page_tables((void*)Mem_layout::Caps_start, (void*)Mem_layout::Caps_end);
132 #else
133   ms->dir()->Pdir::alloc_cast<Mem_space_q_alloc>()
134     ->destroy(Virt_addr(Mem_layout::Caps_start),
135               Virt_addr(Mem_layout::Caps_end), Pdir::Depth - 1,
136               Mem_space_q_alloc(ram_quota(), Mapped_allocator::allocator()));
137 #endif
138 }
139
140 //
141 // Utilities for map<Generic_obj_space> and unmap<Generic_obj_space>
142 //
143
// Look up capability slot `virt` (slots are always single entries).
// With Optimize_local == 0 the local fast path is compile-time dead
// and the entry is always reached through its kernel (pmem) alias.
// On success fills *phys/*attribs and returns the entry's validity;
// on a missing cap page reports the whole page range as empty.
144 PUBLIC  template< typename SPACE >
145 inline NEEDS[Generic_obj_space::cap_virt, Generic_obj_space::get_cap]
146 bool
147 Generic_obj_space<SPACE>::v_lookup(Addr const &virt, Phys_addr *phys = 0,
148                                    Size *size = 0, unsigned *attribs = 0)
149 {
150   if (size) size->set_value(1);
151   Entry *cap;
152
153   if (Optimize_local
154       && mem_space() == Mem_space::current_mem_space(current_cpu()))
155     cap = cap_virt(virt.value());
156   else
157     cap = get_cap(virt.value());
158
159   if (EXPECT_FALSE(!cap))
160     {
          // no cap page mapped: the miss covers Caps_per_page slots
161       if (size) size->set_value(Caps_per_page);
162       return false;
163     }
164
165   if (Optimize_local)
166     {
167       Capability c = Mem_layout::read_special_safe((Capability*)cap);
168
169       if (phys) *phys = c.obj();
170       if (c.valid() && attribs) *attribs = c.rights();
171       return c.valid();
172     }
173   else
174     {
          // record the kernel address of the entry for the map logic
          // (map-nodes link through addresses valid in all spaces)
175       Obj::set_entry(virt, cap);
176       if (phys) *phys = cap->obj();
177       if (cap->valid() && attribs) *attribs = cap->rights();
178       return cap->valid();
179     }
180 }
181
// Read the capability stored at slot `virt`; works on foreign spaces
// too.  Returns an invalid (void) capability when no cap page exists.
182 PUBLIC template< typename SPACE >
183 inline NEEDS [Generic_obj_space::cap_virt, Generic_obj_space::get_cap]
184 typename Generic_obj_space<SPACE>::Capability
185 Generic_obj_space<SPACE>::lookup(Address virt)
186 {
187   Capability *c;
188   virt &= ~(~0UL << Whole_space);   // clamp index to the space's width
189
190   if (mem_space() == Mem_space::current_mem_space(current_cpu()))
191     c = reinterpret_cast<Capability*>(cap_virt(virt));   // local window
192   else
193     c = get_cap(virt);   // kernel alias via the foreign page table
194
195   if (EXPECT_FALSE(!c))
196     return Capability(0); // void
197
      // read_special_safe: fault-tolerant read -- presumably yields an
      // invalid value if the local cap page is unmapped; confirm
198   return Mem_layout::read_special_safe(c);
199 }
200
// Fast-path lookup through the task-local cap window, returning the
// object pointer and optionally the rights.  Uses cap_virt() directly
// (no foreign translation), so it is only meaningful for the currently
// active space -- NOTE(review): confirm callers guarantee this.
201 PUBLIC template< typename SPACE >
202 inline NEEDS [Generic_obj_space::cap_virt]
203 Kobject_iface *
204 Generic_obj_space<SPACE>::lookup_local(Address virt, unsigned char *rights = 0)
205 {
206   virt &= ~(~0UL << Whole_space);
207   Capability *c = reinterpret_cast<Capability*>(cap_virt(virt));
208   Capability cap = Mem_layout::read_special_safe(c);
209   if (rights) *rights = cap.rights();
210   return cap.obj();
211 }
212
213
// Remove rights from (or invalidate) the single capability at `virt`.
// If the R right is among page_attribs the whole capability is
// invalidated; otherwise only the requested C/W/S/D rights are
// dropped.  Always returns 0 (no flush state to report).
214 PUBLIC template< typename SPACE >
215 inline NEEDS[<cassert>, Generic_obj_space::cap_virt, Generic_obj_space::get_cap]
216 unsigned long
217 Generic_obj_space<SPACE>::v_delete(Page_number virt, Size size,
218                                    unsigned long page_attribs = L4_fpage::CRWSD)
219 {
220   (void)size;
221   assert (size.value() == 1);   // object caps are always single slots
222
223   Entry *c;
224   if (Optimize_local
225       && mem_space() == Mem_space::current_mem_space(current_cpu()))
226     {
227       c = cap_virt(virt.value());
228       if (!c)
229         return 0;
230
          // probe via a safe read first: the local page may be unmapped
231       Capability cap = Mem_layout::read_special_safe((Capability*)c);
232       if (!cap.valid())
233         return 0;
234     }
235   else
236     c = get_cap(virt.value());
237
238   if (c && c->valid())
239     {
240       if (page_attribs & L4_fpage::R)
241         c->invalidate();
242       else
243         c->del_rights(page_attribs & L4_fpage::CWSD);
244     }
245
246   return 0;
247 }
248
// Insert object `phys` with `page_attribs` into cap slot `virt`,
// allocating the backing cap page on demand.  Returns:
//   Insert_ok                  -- slot was empty, cap written
//   Insert_warn_exists         -- same object, identical rights
//   Insert_warn_attrib_upgrade -- same object, rights extended
//   Insert_err_exists          -- slot holds a different object
//   Insert_err_nomem           -- cap page could not be allocated
249 PUBLIC  template< typename SPACE >
250 inline NEEDS[Generic_obj_space::cap_virt, Generic_obj_space::caps_alloc,
251              Generic_obj_space::alien_lookup, "kdb_ke.h"]
252 typename Generic_obj_space<SPACE>::Status
253 Generic_obj_space<SPACE>::v_insert(Phys_addr phys, Addr const &virt, Size size,
254                                    unsigned char page_attribs)
255 {
256   (void)size;
257   assert (size.value() == 1);
258
259   Entry *c;
260
261   if (Optimize_local
262       && mem_space() == Mem_space::current_mem_space(current_cpu()))
263     {
264       c = cap_virt(virt.value());
265       if (!c)
266         return Insert_err_nomem;
267
268       Capability cap;
269       if (!Mem_layout::read_special_safe((Capability*)c, cap)
270           && !caps_alloc(virt.value()))
271         return Insert_err_nomem;
272     }
273   else
274     {
          // Optimize_local == 0: always resolve through the kernel
          // alias, allocating the cap page if it is not mapped yet
275       c = alien_lookup(virt.value());
276       if (!c && !(c = caps_alloc(virt.value())))
277         return Insert_err_nomem;
278       Obj::set_entry(virt, c);
279     }
280
281   if (c->valid())
282     {
283       if (c->obj() == phys)
284         {
              // same object already present: report or upgrade rights
285           if (EXPECT_FALSE(c->rights() == page_attribs))
286             return Insert_warn_exists;
287
288           c->add_rights(page_attribs);
289           return Insert_warn_attrib_upgrade;
290         }
291       else
292         return Insert_err_exists;
293     }
294
295   c->set(phys, page_attribs);
296   return Insert_ok;
297 }
298
299
300 PUBLIC  template< typename SPACE >
301 static inline
302 typename Generic_obj_space<SPACE>::Addr
303 Generic_obj_space<SPACE>::map_max_address()
304 {
305   Mword r;
306
307   r = (Mem_layout::Caps_end - Mem_layout::Caps_start) / sizeof(Entry);
308   if (Map_max_address < r)
309     r = Map_max_address;
310
311   return Addr(r);
312 }
313
314