]> rtime.felk.cvut.cz Git - l4.git/blob - kernel/fiasco/src/kern/obj_space-virt.cpp
d42f551ae03470e1117ced1af5189661d9ff30d2
[l4.git] / kernel / fiasco / src / kern / obj_space-virt.cpp
1 INTERFACE:
2
3 #include "mem.h"
4 #include "mem_space.h"
5 #include "ram_quota.h"
6
EXTENSION class Generic_obj_space
{
  // Do not use the virtually mapped cap table in v_lookup and
  // v_insert: the map logic needs the kernel address for link
  // pointers in the map-nodes, and those addresses must be valid in
  // all address spaces.  Hence the "local" fast path is disabled and
  // lookups always go through the physical (alien) path.
  enum { Optimize_local = 0 };
};
15
16 IMPLEMENTATION:
17
18 #include <cstring>
19 #include <cassert>
20
21 #include "atomic.h"
22 #include "config.h"
23 #include "cpu.h"
24 #include "kdb_ke.h"
25 #include "kmem_alloc.h"
26 #include "mem_layout.h"
27
28 PRIVATE template< typename SPACE > inline
29 Mem_space *
30 Generic_obj_space<SPACE>::mem_space()
31 { return static_cast<SPACE*>(this); }
32
33 PRIVATE  template< typename SPACE >
34 static inline NEEDS["mem_layout.h"]
35 typename Generic_obj_space<SPACE>::Entry *
36 Generic_obj_space<SPACE>::cap_virt(Address index)
37 { return reinterpret_cast<Entry*>(Mem_layout::Caps_start) + index; }
38
39 PRIVATE  template< typename SPACE >
40 inline NEEDS["mem_space.h", "mem_layout.h", Generic_obj_space::cap_virt]
41 typename Generic_obj_space<SPACE>::Entry *
42 Generic_obj_space<SPACE>::alien_lookup(Address index)
43 {
44   Mem_space *ms = mem_space();
45
46   Address phys = Address(ms->virt_to_phys((Address)cap_virt(index)));
47   if (EXPECT_FALSE(phys == ~0UL))
48     return 0;
49
50   return reinterpret_cast<Entry*>(Mem_layout::phys_to_pmem(phys));
51 }
52
53 PRIVATE template< typename SPACE >
54 typename Generic_obj_space<SPACE>::Entry *
55 Generic_obj_space<SPACE>::get_cap(Address index)
56 { return alien_lookup(index); }
57
58 PUBLIC  template< typename SPACE >
59 inline NEEDS["mem_space.h"]
60 Ram_quota *
61 Generic_obj_space<SPACE>::ram_quota() const
62 { return static_cast<SPACE const *>(this)->ram_quota(); }
63
64
65 PRIVATE  template< typename SPACE >
66 /*inline NEEDS["kmem_alloc.h", <cstring>, "ram_quota.h",
67                      Generic_obj_space::cap_virt]*/
68 typename Generic_obj_space<SPACE>::Entry *
69 Generic_obj_space<SPACE>::caps_alloc(Address virt)
70 {
71   Address cv = (Address)cap_virt(virt);
72   void *mem = Kmem_alloc::allocator()->q_unaligned_alloc(ram_quota(), Config::PAGE_SIZE);
73
74   if (!mem)
75     return 0;
76
77   add_dbg_info(mem, this, virt);
78
79   Mem::memset_mwords(mem, 0, Config::PAGE_SIZE / sizeof(Mword));
80
81   Mem_space::Status s;
82   s = mem_space()->v_insert(
83       Mem_space::Phys_addr::create(Mem_space::kernel_space()->virt_to_phys((Address)mem)),
84       Mem_space::Addr::create(cv).trunc(Mem_space::Size::create(Config::PAGE_SIZE)),
85       Mem_space::Size::create(Config::PAGE_SIZE),
86       Mem_space::Page_cacheable | Mem_space::Page_writable
87       | Mem_space::Page_referenced | Mem_space::Page_dirty);
88
89   switch (s)
90     {
91     case Insert_ok:
92       break;
93     case Insert_warn_exists:
94     case Insert_warn_attrib_upgrade:
95       assert (false);
96       break;
97     case Insert_err_exists:
98     case Insert_err_nomem:
99       Kmem_alloc::allocator()->q_unaligned_free(ram_quota(),
100           Config::PAGE_SIZE, mem);
101       return 0;
102     };
103
104   unsigned long cap = cv & (Config::PAGE_SIZE - 1) | (unsigned long)mem;
105
106   return reinterpret_cast<Entry*>(cap);
107 }
108
PRIVATE template< typename SPACE >
void
Generic_obj_space<SPACE>::caps_free()
{
  // Release all capability-table pages of this space, then tear down
  // the page tables that backed the cap-table window.
  Mem_space *ms = mem_space();
  if (EXPECT_FALSE(!ms || !ms->dir()))
    return; // space never fully constructed: nothing to free

  Kmem_alloc *a = Kmem_alloc::allocator();
  // Walk the cap space one page worth of slots at a time; get_cap()
  // returns 0 for ranges that never had a page allocated.
  for (unsigned long i = 0; i < map_max_address().value();
       i += Caps_per_page)
    {
      Entry *c = get_cap(i);
      if (!c)
        continue;

      // NOTE(review): c already is a pmem address from get_cap();
      // translating it again through ms presumably canonicalizes it
      // via the shared kernel mappings — verify against Mem_layout.
      Address cp = Address(ms->virt_to_phys(Address(c)));
      assert_kdb (cp != ~0UL);
      void *cv = (void*)Mem_layout::phys_to_pmem(cp);
      remove_dbg_info(cv);

      // Return the page to the allocator against this space's quota.
      a->q_unaligned_free(ram_quota(), Config::PAGE_SIZE, cv);
    }
#if defined (CONFIG_ARM)
  // ARM: free the page tables of the cap window directly.
  ms->dir()->free_page_tables((void*)Mem_layout::Caps_start, (void*)Mem_layout::Caps_end, Kmem_alloc::q_allocator(ram_quota()));
#else
  // Other architectures: destroy the paging structures below the
  // leaf level for the whole cap window.
  ms->dir()->destroy(Virt_addr(Mem_layout::Caps_start),
                     Virt_addr(Mem_layout::Caps_end), Pdir::Depth - 1,
                     Kmem_alloc::q_allocator(ram_quota()));
#endif
}
140
141 //
142 // Utilities for map<Generic_obj_space> and unmap<Generic_obj_space>
143 //
144
PUBLIC  template< typename SPACE >
inline NEEDS[Generic_obj_space::cap_virt, Generic_obj_space::get_cap]
bool
Generic_obj_space<SPACE>::v_lookup(Addr const &virt, Phys_addr *phys = 0,
                                   Size *size = 0, unsigned *attribs = 0)
{
  // Mapping-database lookup of the capability at slot `virt`.
  // Returns true iff a valid capability is present; optionally
  // reports the object (phys), lookup granularity (size, in slots)
  // and the capability rights (attribs).
  if (size) size->set_value(1);
  Entry *cap;

  // Optimize_local is 0 here (see INTERFACE): always take the
  // physical path, so the entry address is valid in every space.
  if (Optimize_local
      && mem_space() == Mem_space::current_mem_space(current_cpu()))
    cap = cap_virt(virt.value());
  else
    cap = get_cap(virt.value());

  if (EXPECT_FALSE(!cap))
    {
      // No cap page allocated: report a whole page worth of empty
      // slots so the caller can skip the entire range at once.
      if (size) size->set_value(Caps_per_page);
      return false;
    }

  if (Optimize_local)
    {
      // Dead branch with Optimize_local == 0; kept for spaces that
      // enable the local fast path.
      Capability c = Mem_layout::read_special_safe((Capability*)cap);

      if (phys) *phys = c.obj();
      if (c.valid() && attribs) *attribs = c.rights();
      return c.valid();
    }
  else
    {
      // Record the kernel address of the entry for the map logic
      // (map-node link pointers live there).
      Obj::set_entry(virt, cap);
      if (phys) *phys = cap->obj();
      if (cap->valid() && attribs) *attribs = cap->rights();
      return cap->valid();
    }
}
182
183 PUBLIC template< typename SPACE >
184 inline NEEDS [Generic_obj_space::cap_virt, Generic_obj_space::get_cap]
185 typename Generic_obj_space<SPACE>::Capability
186 Generic_obj_space<SPACE>::lookup(Address virt)
187 {
188   Capability *c;
189   virt &= ~(~0UL << Whole_space);
190
191   if (mem_space() == Mem_space::current_mem_space(current_cpu()))
192     c = reinterpret_cast<Capability*>(cap_virt(virt));
193   else
194     c = get_cap(virt);
195
196   if (EXPECT_FALSE(!c))
197     return Capability(0); // void
198
199   return Mem_layout::read_special_safe(c);
200 }
201
202 PUBLIC template< typename SPACE >
203 inline NEEDS [Generic_obj_space::cap_virt]
204 Kobject_iface *
205 Generic_obj_space<SPACE>::lookup_local(Address virt, unsigned char *rights = 0)
206 {
207   virt &= ~(~0UL << Whole_space);
208   Capability *c = reinterpret_cast<Capability*>(cap_virt(virt));
209   Capability cap = Mem_layout::read_special_safe(c);
210   if (rights) *rights = cap.rights();
211   return cap.obj();
212 }
213
214
PUBLIC template< typename SPACE >
inline NEEDS[<cassert>, Generic_obj_space::cap_virt, Generic_obj_space::get_cap]
unsigned long
Generic_obj_space<SPACE>::v_delete(Page_number virt, Size size,
                                   unsigned long page_attribs = L4_fpage::CRWSD)
{
  // Revoke rights from the capability at slot `virt`.  If the R right
  // is among page_attribs the whole entry is invalidated; otherwise
  // only the given C/W/S/D rights are removed.  Always returns 0.
  (void)size;
  assert (size.value() == 1); // obj space operates on single slots

  Entry *c;
  if (Optimize_local // dead branch: Optimize_local == 0 (see INTERFACE)
      && mem_space() == Mem_space::current_mem_space(current_cpu()))
    {
      c = cap_virt(virt.value());
      if (!c)
        return 0;

      // Probe via the safe read; bail out on an empty slot.
      Capability cap = Mem_layout::read_special_safe((Capability*)c);
      if (!cap.valid())
        return 0;
    }
  else
    c = get_cap(virt.value());

  if (c && c->valid())
    {
      if (page_attribs & L4_fpage::R)
        c->invalidate();               // full unmap of the capability
      else
        c->del_rights(page_attribs & L4_fpage::CWSD); // partial revoke
    }

  return 0;
}
249
PUBLIC  template< typename SPACE >
inline NEEDS[Generic_obj_space::cap_virt, Generic_obj_space::caps_alloc,
             Generic_obj_space::alien_lookup, "kdb_ke.h"]
typename Generic_obj_space<SPACE>::Status
Generic_obj_space<SPACE>::v_insert(Phys_addr phys, Addr const &virt, Size size,
                                   unsigned char page_attribs)
{
  // Insert capability `phys` with rights `page_attribs` at slot
  // `virt`, allocating the backing cap page on demand.  Follows the
  // usual insert protocol: Insert_ok on success, warn statuses for
  // duplicates/rights upgrades, err statuses on conflict or OOM.
  (void)size;
  assert (size.value() == 1); // one slot at a time

  Entry *c;

  if (Optimize_local // dead branch: Optimize_local == 0 (see INTERFACE)
      && mem_space() == Mem_space::current_mem_space(current_cpu()))
    {
      c = cap_virt(virt.value());
      if (!c)
        return Insert_err_nomem;

      // Probe the slot; if the backing page is missing, allocate it.
      Capability cap;
      if (!Mem_layout::read_special_safe((Capability*)c, cap)
          && !caps_alloc(virt.value()))
        return Insert_err_nomem;
    }
  else
    {
      c = alien_lookup(virt.value());
      if (!c && !(c = caps_alloc(virt.value())))
        return Insert_err_nomem;
      // Publish the kernel address of the entry for the map logic.
      Obj::set_entry(virt, c);
    }

  if (c->valid())
    {
      if (c->obj() == phys)
        {
          // Same object already present: either a pure duplicate or
          // a rights upgrade.
          if (EXPECT_FALSE(c->rights() == page_attribs))
            return Insert_warn_exists;

          c->add_rights(page_attribs);
          return Insert_warn_attrib_upgrade;
        }
      else
        return Insert_err_exists; // slot occupied by another object
    }

  c->set(phys, page_attribs);
  return Insert_ok;
}
299
300
301 PUBLIC  template< typename SPACE >
302 static inline
303 typename Generic_obj_space<SPACE>::Addr
304 Generic_obj_space<SPACE>::map_max_address()
305 {
306   Mword r;
307
308   r = (Mem_layout::Caps_end - Mem_layout::Caps_start) / sizeof(Entry);
309   if (Map_max_address < r)
310     r = Map_max_address;
311
312   return Addr(r);
313 }
314
315