// kernel/fiasco/src/kern/obj_space-virt.cpp
INTERFACE:

#include "mem_space.h"
#include "ram_quota.h"

EXTENSION class Generic_obj_space
{
  // Do not use the virtually mapped cap table in v_lookup and v_insert:
  // the mapping-database logic needs kernel addresses for the link
  // pointers in its map nodes, and those addresses must be valid in all
  // address spaces.
  enum { Optimize_local = 0 };
};
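
/*
 * Illustrative sketch, not part of the original file: assuming the
 * per-space cap-table window [Mem_layout::Caps_start, Mem_layout::Caps_end),
 * slot `i' of the table sits at
 *
 *   Entry *slot = reinterpret_cast<Entry *>(Mem_layout::Caps_start) + i;
 *
 * and one page presumably covers Config::PAGE_SIZE / sizeof(Entry) slots
 * (Caps_per_page).  cap_virt() below computes exactly this slot address.
 */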

IMPLEMENTATION:

#include <cstring>
#include <cassert>

#include "atomic.h"
#include "config.h"
#include "cpu.h"
#include "kdb_ke.h"
#include "mapped_alloc.h"
#include "mem_layout.h"


// Return the Mem_space that backs this object space.
PRIVATE template< typename SPACE >
Mem_space *
Generic_obj_space<SPACE>::mem_space() const
{ return SPACE::space(this)->mem_space(); }

// Address of cap-table slot `index' inside the virtually mapped window.
PRIVATE template< typename SPACE >
static inline NEEDS["mem_layout.h"]
typename Generic_obj_space<SPACE>::Entry *
Generic_obj_space<SPACE>::cap_virt(Address index)
{ return reinterpret_cast<Entry*>(Mem_layout::Caps_start) + index; }

// Resolve a slot through the owning space's page table, yielding a
// pointer that is valid in the kernel's physical-memory mapping.
PRIVATE template< typename SPACE >
inline NEEDS["mem_space.h", "mem_layout.h", Generic_obj_space::cap_virt]
typename Generic_obj_space<SPACE>::Entry *
Generic_obj_space<SPACE>::alien_lookup(Address index)
{
  Address phys = Address(mem_space()->virt_to_phys((Address)cap_virt(index)));
  if (EXPECT_FALSE(phys == ~0UL))
    return 0;

  return reinterpret_cast<Entry*>(Mem_layout::phys_to_pmem(phys));
}

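/*
 * Illustrative sketch, not part of the original file: alien_lookup()
 * performs a two-step translation so the kernel can touch another
 * space's cap table without switching address spaces.  Roughly:
 *
 *   Address user_va = (Address)cap_virt(index);            // slot VA in the owner
 *   Address pa      = mem_space()->virt_to_phys(user_va);  // walk owner's page table
 *   if (pa == ~0UL)                                        // slot page not mapped yet
 *     return 0;
 *   return (Entry *)Mem_layout::phys_to_pmem(pa);          // kernel-visible alias
 */
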
PRIVATE template< typename SPACE >
typename Generic_obj_space<SPACE>::Entry *
Generic_obj_space<SPACE>::get_cap(Address index)
{ return alien_lookup(index); }

PUBLIC template< typename SPACE >
inline NEEDS["mem_space.h"]
Ram_quota *
Generic_obj_space<SPACE>::ram_quota() const
{ return mem_space()->ram_quota(); }


PRIVATE template< typename SPACE >
/*inline NEEDS["mapped_alloc.h", <cstring>, "ram_quota.h",
               Generic_obj_space::cap_virt]*/
typename Generic_obj_space<SPACE>::Entry *
Generic_obj_space<SPACE>::caps_alloc(Address virt)
{
  Address cv = (Address)cap_virt(virt);

  // Allocate one page of cap-table storage, charged to this space's quota.
  void *mem = Mapped_allocator::allocator()->q_unaligned_alloc(ram_quota(), Config::PAGE_SIZE);

  if (!mem)
    return 0;

  add_dbg_info(mem, this, virt);

  Mem::memset_mwords(mem, 0, Config::PAGE_SIZE / sizeof(Mword));

  // Map the cleared page into this space's cap-table window.
  Mem_space::Status s;
  s = mem_space()->v_insert(
      Mem_space::Phys_addr::create(Mem_space::kernel_space()->virt_to_phys((Address)mem)),
      Mem_space::Addr::create(cv).trunc(Mem_space::Size::create(Config::PAGE_SIZE)),
      Mem_space::Size::create(Config::PAGE_SIZE),
      Mem_space::Page_cacheable | Mem_space::Page_writable
      | Mem_space::Page_referenced | Mem_space::Page_dirty);

  switch (s)
    {
    case Insert_ok:
    case Insert_warn_exists:
    case Insert_warn_attrib_upgrade:
    case Insert_err_exists:
      break;
    case Insert_err_nomem:
      Mapped_allocator::allocator()->q_unaligned_free(ram_quota(),
          Config::PAGE_SIZE, mem);
      return 0;
    }

  // Return the kernel-visible address of the requested slot: the page
  // offset of `cv' combined with the kernel address of the new page.
  unsigned long cap = (cv & (Config::PAGE_SIZE - 1)) | (unsigned long)mem;

  return reinterpret_cast<Entry*>(cap);
}

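/*
 * Illustrative sketch, not part of the original file: with a hypothetical
 * 4 KiB page size and 8-byte entries, a request for slot index 0x1234
 * would work out roughly as
 *
 *   cv  = Caps_start + 0x1234 * sizeof(Entry)   // virtual slot address
 *   mem = freshly allocated, zeroed kernel page
 *   cap = (cv & 0xfff) | (unsigned long)mem     // same page offset, kernel page
 *
 * i.e. the returned Entry* points into the new page at the same offset
 * the slot has within its cap-table page.
 */
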
PRIVATE template< typename SPACE >
void
Generic_obj_space<SPACE>::caps_free()
{
  Mapped_allocator *a = Mapped_allocator::allocator();
  for (unsigned long i = 0; i < map_max_address().value();
       i += Caps_per_page)
    {
      Entry *c = get_cap(i);
      if (!c)
        continue;

      Address cp = Address(mem_space()->virt_to_phys(Address(c)));
      assert_kdb (cp != ~0UL);
      void *cv = (void*)Mem_layout::phys_to_pmem(cp);
      remove_dbg_info(cv);

      a->q_unaligned_free(ram_quota(), Config::PAGE_SIZE, cv);
    }
#if defined (CONFIG_ARM)
  mem_space()->dir()->free_page_tables((void*)Mem_layout::Caps_start, (void*)Mem_layout::Caps_end);
#else
  mem_space()->dir()->Pdir::alloc_cast<Mem_space_q_alloc>()
    ->destroy(Virt_addr(Mem_layout::Caps_start),
              Virt_addr(Mem_layout::Caps_end), Pdir::Depth - 1,
              Mem_space_q_alloc(ram_quota(), Mapped_allocator::allocator()));
#endif
}

//
// Utilities for map<Generic_obj_space> and unmap<Generic_obj_space>
//

PUBLIC template< typename SPACE >
inline NEEDS[Generic_obj_space::cap_virt, Generic_obj_space::get_cap]
bool
Generic_obj_space<SPACE>::v_lookup(Addr const &virt, Phys_addr *phys = 0,
                                   Size *size = 0, unsigned *attribs = 0)
{
  if (size) size->set_value(1);
  Entry *cap;

  if (Optimize_local
      && mem_space() == Mem_space::current_mem_space(current_cpu()))
    cap = cap_virt(virt.value());
  else
    cap = get_cap(virt.value());

  if (EXPECT_FALSE(!cap))
    {
      // No cap-table page behind this slot: report the whole page as absent.
      if (size) size->set_value(Caps_per_page);
      return false;
    }

  if (Optimize_local)
    {
      Capability c = Mem_layout::read_special_safe((Capability*)cap);

      if (phys) *phys = c.obj();
      if (c.valid() && attribs) *attribs = c.rights();
      return c.valid();
    }
  else
    {
      Obj::set_entry(virt, cap);
      if (phys) *phys = cap->obj();
      if (cap->valid() && attribs) *attribs = cap->rights();
      return cap->valid();
    }
}
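
/*
 * Illustrative sketch, not part of the original file: with Optimize_local
 * disabled (as in this file), every lookup goes through get_cap(), i.e.
 * through the owner's page table, so the returned Entry* is a kernel
 * address that the mapping database may store in its map nodes.  The
 * Optimize_local branch would instead read the current space's virtually
 * mapped slot directly, e.g.
 *
 *   Capability c = Mem_layout::read_special_safe((Capability *)cap_virt(idx));
 *
 * which is only valid while that space is the currently active one.
 */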

PUBLIC template< typename SPACE >
inline NEEDS [Generic_obj_space::cap_virt, Generic_obj_space::get_cap]
typename Generic_obj_space<SPACE>::Capability
Generic_obj_space<SPACE>::lookup(Address virt)
{
  Capability *c;
  virt &= ~(~0UL << Whole_space);

  if (mem_space() == Mem_space::current_mem_space(current_cpu()))
    c = reinterpret_cast<Capability*>(cap_virt(virt));
  else
    c = get_cap(virt);

  if (EXPECT_FALSE(!c))
    return Capability(0); // void

  return Mem_layout::read_special_safe(c);
}

PUBLIC template< typename SPACE >
inline NEEDS [Generic_obj_space::cap_virt]
Kobject_iface *
Generic_obj_space<SPACE>::lookup_local(Address virt, unsigned char *rights = 0)
{
  virt &= ~(~0UL << Whole_space);
  Capability *c = reinterpret_cast<Capability*>(cap_virt(virt));
  Capability cap = Mem_layout::read_special_safe(c);
  if (rights) *rights = cap.rights();
  return cap.obj();
}


PUBLIC template< typename SPACE >
inline NEEDS[<cassert>, Generic_obj_space::cap_virt, Generic_obj_space::get_cap]
unsigned long
Generic_obj_space<SPACE>::v_delete(Page_number virt, Size size,
                                   unsigned long page_attribs = L4_fpage::CRWSD)
{
  (void)size;
  assert (size.value() == 1);

  Entry *c;
  if (Optimize_local
      && mem_space() == Mem_space::current_mem_space(current_cpu()))
    {
      c = cap_virt(virt.value());
      if (!c)
        return 0;

      Capability cap = Mem_layout::read_special_safe((Capability*)c);
      if (!cap.valid())
        return 0;
    }
  else
    c = get_cap(virt.value());

  if (c && c->valid())
    {
      // Removing the R right revokes the whole capability; otherwise only
      // the requested rights are stripped.
      if (page_attribs & L4_fpage::R)
        c->invalidate();
      else
        c->del_rights(page_attribs & L4_fpage::CWSD);
    }

  return 0;
}

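/*
 * Illustrative sketch, not part of the original file: a caller revoking a
 * capability completely versus merely downgrading it would, under this
 * interface, look roughly like
 *
 *   space->v_delete(virt, size, L4_fpage::R);   // revoke: slot is invalidated
 *   space->v_delete(virt, size, L4_fpage::W);   // downgrade: only W is removed
 *
 * where `virt' and `size' stand for the usual slot index and unit size;
 * the return value is unused.
 */
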
PUBLIC template< typename SPACE >
inline NEEDS[Generic_obj_space::cap_virt, Generic_obj_space::caps_alloc,
             Generic_obj_space::alien_lookup, "kdb_ke.h"]
typename Generic_obj_space<SPACE>::Status
Generic_obj_space<SPACE>::v_insert(Phys_addr phys, Addr const &virt, Size size,
                                   unsigned char page_attribs)
{
  (void)size;
  assert (size.value() == 1);

  Entry *c;

  if (Optimize_local
      && mem_space() == Mem_space::current_mem_space(current_cpu()))
    {
      c = cap_virt(virt.value());
      if (!c)
        return Insert_err_nomem;

      Capability cap;
      if (!Mem_layout::read_special_safe((Capability*)c, cap)
          && !caps_alloc(virt.value()))
        return Insert_err_nomem;
    }
  else
    {
      // Allocate the cap-table page on demand if the slot is not backed yet.
      c = alien_lookup(virt.value());
      if (!c && !(c = caps_alloc(virt.value())))
        return Insert_err_nomem;
      Obj::set_entry(virt, c);
    }

  if (c->valid())
    {
      if (c->obj() == phys)
        {
          if (EXPECT_FALSE(c->rights() == page_attribs))
            return Insert_warn_exists;

          c->add_rights(page_attribs);
          return Insert_warn_attrib_upgrade;
        }
      else
        return Insert_err_exists;
    }

  c->set(phys, page_attribs);
  return Insert_ok;
}


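/*
 * Summary sketch, not part of the original file: the Status values
 * v_insert can return for a single slot are
 *
 *   Insert_ok                  -- slot was empty and now holds `phys'
 *   Insert_warn_exists         -- same object with the same rights already there
 *   Insert_warn_attrib_upgrade -- same object, rights were extended
 *   Insert_err_exists          -- slot already holds a different object
 *   Insert_err_nomem           -- no cap-table page and none could be allocated
 */
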
PUBLIC template< typename SPACE >
static inline
typename Generic_obj_space<SPACE>::Addr
Generic_obj_space<SPACE>::map_max_address()
{
  Mword r;

  // The usable capability range is bounded both by the size of the
  // virtually mapped cap-table window and by Map_max_address.
  r = (Mem_layout::Caps_end - Mem_layout::Caps_start) / sizeof(Entry);
  if (Map_max_address < r)
    r = Map_max_address;

  return Addr(r);
}

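/*
 * Illustrative sketch, not part of the original file: with a hypothetical
 * 4 MiB cap window and 8-byte entries,
 *
 *   (Caps_end - Caps_start) / sizeof(Entry) = 0x400000 / 8 = 0x80000
 *
 * so the object space could address at most 0x80000 (512 Ki) capability
 * slots, further clamped by Map_max_address if that is smaller.
 */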