]> rtime.felk.cvut.cz Git - l4.git/blob - kernel/fiasco/src/kern/arm/mem_op.cpp
update
[l4.git] / kernel / fiasco / src / kern / arm / mem_op.cpp
INTERFACE [arm]:

#include "types.h"

/**
 * ARM-specific memory-operation system call: user-requested cache
 * maintenance and kernel-mediated single memory accesses.
 *
 * The opcode values below arrive in r0 of the syscall entry frame
 * (see sys_arm_mem_op); they are ABI and must not be renumbered.
 */
class Mem_op
{
public:
  // Cache-maintenance operations (bit 4 clear).
  enum Op_cache
  {
    Op_cache_clean_data        = 0x00,  // write back D-cache range
    Op_cache_flush_data        = 0x01,  // write back + invalidate D-cache range
    Op_cache_inv_data          = 0x02,  // invalidate D-cache range (boundaries flushed)
    Op_cache_coherent          = 0x03,  // make range I/D coherent (after code generation)
    Op_cache_dma_coherent      = 0x04,  // flush L1 + L2 for DMA on a range
    Op_cache_dma_coherent_full = 0x05,  // flush entire L1 + L2 for DMA
    Op_cache_l2_clean          = 0x06,  // outer (L2) cache: clean range
    Op_cache_l2_flush          = 0x07,  // outer (L2) cache: clean + invalidate range
    Op_cache_l2_inv            = 0x08,  // outer (L2) cache: invalidate range
  };

  // Direct memory accesses (bit 4 set, dispatched to arm_mem_access).
  enum Op_mem
  {
    Op_mem_read_data     = 0x10,
    Op_mem_write_data    = 0x11,
  };
};
27
28 // ------------------------------------------------------------------------
29 IMPLEMENTATION [arm]:
30
31 #include "context.h"
32 #include "entry_frame.h"
33 #include "globals.h"
34 #include "mem.h"
35 #include "mem_space.h"
36 #include "mem_unit.h"
37 #include "outer_cache.h"
38 #include "space.h"
39 #include "warn.h"
40
41 PRIVATE static void
42 Mem_op::l1_inv_dcache(Address start, Address end)
43 {
44   if (start & Mem_unit::Cache_line_mask)
45     {
46       Mem_unit::flush_dcache((void *)start, (void *)start);
47       start += Mem_unit::Cache_line_size;
48       start &= ~Mem_unit::Cache_line_mask;
49     }
50   if (end & Mem_unit::Cache_line_mask)
51     {
52       Mem_unit::flush_dcache((void *)end, (void *)end);
53       end &= ~Mem_unit::Cache_line_mask;
54     }
55
56   if (start < end)
57     Mem_unit::inv_dcache((void *)start, (void *)end);
58 }
59
60 PRIVATE static void
61 Mem_op::inv_icache(Address start, Address end)
62 {
63   if (Address(end) - Address(start) > 0x2000)
64     asm volatile("mcr p15, 0, r0, c7, c5, 0");
65   else
66     {
67       for (start &= ~Mem_unit::Icache_line_mask;
68            start < end; start += Mem_unit::Icache_line_size)
69         asm volatile("mcr p15, 0, %0, c7, c5, 1" : : "r" (start));
70     }
71 }
72
/**
 * Perform a cache-maintenance operation on behalf of the current task.
 *
 * \param op     one of the Op_cache opcodes (syscall r0).
 * \param start  inclusive start of the virtual range (syscall r1).
 * \param end    end of the virtual range (syscall r2).
 *
 * Unknown opcodes and inverted ranges (start > end) are silently
 * ignored.
 */
PUBLIC static void
Mem_op::arm_mem_cache_maint(int op, void const *start, void const *end)
{
  Context *c = current();

  if (EXPECT_FALSE(start > end))
    return;

  // Mark the context while the maintenance runs; presumably this tells
  // the fault path to treat faults on the user range specially -- TODO
  // confirm against the page-fault handler.
  c->set_ignore_mem_op_in_progress(true);

  switch (op)
    {
    case Op_cache_clean_data:
      // Write back dirty D-cache lines, keep them valid.
      Mem_unit::clean_dcache(start, end);
      break;

    case Op_cache_flush_data:
      // Write back and invalidate D-cache lines.
      Mem_unit::flush_dcache(start, end);
      break;

    case Op_cache_inv_data:
      // Invalidate; partially covered boundary lines are flushed.
      l1_inv_dcache((Address)start, (Address)end);
      break;

    case Op_cache_coherent:
      // Make I- and D-cache coherent for freshly written code:
      // clean D-cache, barrier, invalidate branch predictor and
      // I-cache, barrier.  The order of these steps is significant.
      Mem_unit::clean_dcache(start, end);
      Mem::dsb();
      Mem_unit::btc_inv();
      inv_icache(Address(start), Address(end));
      Mem::dsb();
      break;

    case Op_cache_l2_clean:
    case Op_cache_l2_flush:
    case Op_cache_l2_inv:
      // Outer (L2) cache ops need virtual-to-physical translation;
      // delegated to the configuration-dependent helper.
      outer_cache_op(op, Address(start), Address(end));
      break;

    case Op_cache_dma_coherent:
        {
          // Make the range coherent for DMA: flush L1, then L2.
          Mem_unit::flush_dcache(Virt_addr(Address(start)), Virt_addr(Address(end)));
          outer_cache_op(Op_cache_l2_flush, Address(start), Address(end));
        }
      break;

    // We might not want to implement this one but single address outer
    // cache flushing can be really slow
    case Op_cache_dma_coherent_full:
      // Flush the entire L1 D-cache and the entire outer cache.
      Mem_unit::flush_dcache();
      Outer_cache::flush();
      break;

    default:
      break;
    };

  c->set_ignore_mem_op_in_progress(false);
}
131
/**
 * Perform a single kernel-mediated memory access for user mode.
 *
 * Register usage (entry frame `r`):
 *   r[0] opcode (Op_mem_read_data / Op_mem_write_data)
 *   r[1] virtual address to access
 *   r[2] access width as log2 bytes: 0 = byte, 1 = half-word, 2 = word
 *   r[3] value read (out, on read) or value to write (in, on write)
 *
 * Invalid widths and addresses outside the task's user memory are
 * silently ignored.  Faults during the access are caught via the
 * context's recovery jmp_buf and reported with a warning.
 */
PUBLIC static void
Mem_op::arm_mem_access(Mword *r)
{
  Address  a = r[1];
  unsigned w = r[2];

  // Only byte/half/word accesses are supported.
  if (w > 2)
    return;

  // The whole access (1 << w bytes) must lie in user memory.
  if (!current()->space()->is_user_memory(a, 1 << w))
    return;

  jmp_buf pf_recovery;
  int e;

  // setjmp-based fault recovery: an unresolvable page fault during the
  // access longjmps back here with a non-zero value.
  if ((e = setjmp(pf_recovery)) == 0)
    {
      current()->recover_jmp_buf(&pf_recovery);

      switch (r[0])
        {
        case Op_mem_read_data:
          switch (w)
            {
            case 0:
              r[3] = *(unsigned char *)a;
              break;
            case 1:
              r[3] = *(unsigned short *)a;
              break;
            case 2:
              r[3] = *(unsigned int *)a;
              break;
            default:
              break;
            };
          break;

        case Op_mem_write_data:
          switch (w)
            {
            case 0:
              *(unsigned char *)a = r[3];
              break;
            case 1:
              *(unsigned short *)a = r[3];
              break;
            case 2:
              *(unsigned int *)a = r[3];
              break;
            default:
              break;
            };
          break;

        default:
          break;
        };
    }
  else
    WARN("Unresolved memory access, skipping\n");

  // Always disarm the recovery buffer before returning.
  current()->recover_jmp_buf(0);
}
196
197 extern "C" void sys_arm_mem_op()
198 {
199   Entry_frame *e = current()->regs();
200   if (EXPECT_FALSE(e->r[0] & 0x10))
201     Mem_op::arm_mem_access(e->r);
202   else
203     Mem_op::arm_mem_cache_maint(e->r[0], (void *)e->r[1], (void *)e->r[2]);
204 }
205
206 // ------------------------------------------------------------------------
207 IMPLEMENTATION [arm && !outer_cache]:
208
// No outer (L2) cache configured: outer-cache maintenance is a no-op.
PRIVATE static inline
void
Mem_op::outer_cache_op(int, Address, Address)
{}
213
214 // ------------------------------------------------------------------------
215 IMPLEMENTATION [arm && outer_cache]:
216
217 PRIVATE static
218 void
219 Mem_op::outer_cache_op(int op, Address start, Address end)
220 {
221
222   Virt_addr s = Virt_addr(start);
223   Virt_addr v = Virt_addr(start);
224   Virt_addr e = Virt_addr(end);
225
226   Context *c = current();
227
228   while (v < e)
229     {
230       Mem_space::Size phys_size;
231       Mem_space::Phys_addr phys_addr;
232       unsigned attrs;
233       bool mapped =    c->mem_space()->v_lookup(Mem_space::Vaddr(v), &phys_addr, &phys_size, &attrs)
234                     && (attrs & Mem_space::Page_user_accessible);
235
236       Virt_size sz = Virt_size(phys_size);
237       Virt_size offs = Virt_size(Virt_addr(v).value() & (Mem_space::Size(phys_size).value() - 1));
238       sz -= offs;
239       if (e - v < sz)
240         sz = e - v;
241
242       if (mapped)
243         {
244           Virt_addr vstart = Virt_addr(phys_addr) | offs;
245           Virt_addr vend = vstart + sz;
246           switch (op)
247             {
248             case Op_cache_l2_clean:
249               Outer_cache::clean(Virt_addr(vstart).value(),
250                                  Virt_addr(vend).value(), false);
251               break;
252             case Op_cache_l2_flush:
253               Outer_cache::flush(Virt_addr(vstart).value(),
254                                  Virt_addr(vend).value(), false);
255               break;
256             case Op_cache_l2_inv:
257               Outer_cache::invalidate(Virt_addr(vstart).value(),
258                                       Virt_addr(vend).value(), false);
259               break;
260             }
261         }
262       v += sz;
263     }
264   Outer_cache::sync();
265 }