1 INTERFACE [arm && hyp]:
5 EXTENSION class Context
27 Unsigned32 contextidr;
32 template< unsigned LREGS >
35 enum { N_lregs = LREGS };
48 /* The following part is our user API */
59 /* The user API ends here */
61 /* we should align this at a cache line ... */
95 Unsigned32 contextidr;
115 //---------------------------------------------------------------------------
116 IMPLEMENTATION [arm && hyp]:
// Sanitize a user-supplied PSR in a Return_frame so the kernel never
// returns to a privileged processor mode chosen by user land.
// NOTE(review): this listing is an excerpt — intervening lines (braces,
// the else path header) are elided.
122 Context::sanitize_user_state(Return_frame *dst) const
124   if (state() & Thread_ext_vcpu_enabled)
// Extended-vCPU threads: a hyp-mode PSR is demoted to usr mode; other
// guest modes are apparently left as-is (elided lines — confirm).
126       if ((dst->psr & Proc::Status_mode_mask) == Proc::PSR_m_hyp)
127         dst->psr = (dst->psr & ~Proc::Status_mode_mask) | Proc::PSR_m_usr;
// Non-vCPU path (presumably the else branch): force user mode, clear the
// interrupt-mask bits, and set the always-required status bits.
131       dst->psr &= ~(Proc::Status_mode_mask | Proc::Status_interrupts_mask);
132       dst->psr |= Proc::Status_mode_user | Proc::Status_always_mask;
// Copy a trap state from `src` to `dst` and sanitize the PSR so it cannot
// carry a privileged mode back to the resumed thread.
136 IMPLEMENT_OVERRIDE inline NEEDS["mem.h", Context::sanitize_user_state]
138 Context::copy_and_sanitize_trap_state(Trap_state *dst,
139                                       Trap_state const *src) const
// Copy the first 19 machine words of the trap state wholesale
// (presumably the GP-register part — confirm against Trap_state layout).
141   Mem::memcpy_mwords(dst, src, 19);
// Re-read the PSR with access_once to get a single consistent value
// (src may be concurrently writable vCPU state), then sanitize it.
143   dst->psr = access_once(&src->psr);
144   sanitize_user_state(dst);
149 Context::fill_user_state()
155 Context::spill_user_state()
// Locate the extended Vm_state area inside a vCPU state page.
// The architecture-specific VM register save area starts at a fixed
// 0x400-byte offset from the Vcpu_state base — assumed part of the
// user-visible vCPU state layout; TODO confirm against the ABI headers.
159 PROTECTED static inline
161 Context::vm_state(Vcpu_state *vs)
162 { return reinterpret_cast<Vm_state *>(reinterpret_cast<char *>(vs) + 0x400); }
// Switch the extended-vCPU (hyp) CP15 and banked register state from the
// current context to the destination context `t`: save the outgoing
// guest's system registers into its Vm_state, then load the incoming
// guest's registers (or reset the hardware to plain-kernel defaults when
// the destination is not an extended vCPU).
// NOTE(review): excerpted listing — braces and some lines are elided.
164 PUBLIC inline NEEDS[Context::vm_state]
166 Context::switch_vm_state(Context *t)
168 Mword _state = state();
169 Mword _to_state = t->state();
// Fast exit: neither side runs in extended-vCPU mode.
170 if (!((_state | _to_state) & Thread_ext_vcpu_enabled))
// ---- Save the outgoing extended-vCPU state into its Vm_state ----------
175 if (_state & Thread_ext_vcpu_enabled)
178 Vm_state *v = vm_state(vcpu_state().access());
// TPIDRURO (user read-only thread ID) is kept in the kernel Context.
179 asm volatile ("mrc p15, 0, %0, c13, c0, 3" : "=r"(_tpidruro));
// Hypervisor configuration and cache-size selection.
181 asm volatile ("mrc p15, 4, %0, c1, c1, 0" : "=r"(v->hcr));
182 asm volatile ("mrc p15, 2, %0, c0, c0, 0" : "=r"(v->csselr));
// System control and coprocessor access control.
184 asm volatile ("mrc p15, 0, %0, c1, c0, 0" : "=r"(v->sctlr));
185 // we unconditionally trap actlr accesses
186 // asm ("mrc p15, 0, %0, c1, c0, 1" : "=r"(v->actlr));
187 asm volatile ("mrc p15, 0, %0, c1, c0, 2" : "=r"(v->cpacr));
// Stage-1 translation state: TTBR0/TTBR1 (64-bit reads) and TTBCR.
189 asm volatile ("mrrc p15, 0, %Q0, %R0, c2" : "=r"(v->ttbr0));
190 asm volatile ("mrrc p15, 1, %Q0, %R0, c2" : "=r"(v->ttbr1));
191 asm volatile ("mrc p15, 0, %0, c2, c0, 2" : "=r"(v->ttbcr));
// Domain access control.
193 asm volatile ("mrc p15, 0, %0, c3, c0, 0" : "=r"(v->dacr));
// Fault status registers (data/instruction + auxiliary).
195 asm volatile ("mrc p15, 0, %0, c5, c0, 0" : "=r"(v->dfsr));
196 asm volatile ("mrc p15, 0, %0, c5, c0, 1" : "=r"(v->ifsr));
197 asm volatile ("mrc p15, 0, %0, c5, c1, 0" : "=r"(v->adfsr));
198 asm volatile ("mrc p15, 0, %0, c5, c1, 1" : "=r"(v->aifsr));
// Fault address registers.
200 asm volatile ("mrc p15, 0, %0, c6, c0, 0" : "=r"(v->dfar));
201 asm volatile ("mrc p15, 0, %0, c6, c0, 2" : "=r"(v->ifar));
// Physical address register (64-bit).
203 asm volatile ("mrrc p15, 0, %Q0, %R0, c7" : "=r"(v->par));
// Memory attribute indirection registers.
205 asm volatile ("mrc p15, 0, %0, c10, c2, 0" : "=r"(v->mair0));
206 asm volatile ("mrc p15, 0, %0, c10, c2, 1" : "=r"(v->mair1));
208 asm volatile ("mrc p15, 0, %0, c10, c3, 0" : "=r"(v->amair0));
209 asm volatile ("mrc p15, 0, %0, c10, c3, 1" : "=r"(v->amair1));
// Exception vector base.
211 asm volatile ("mrc p15, 0, %0, c12, c0, 0" : "=r"(v->vbar));
// Process/thread ID registers.
213 asm volatile ("mrc p15, 0, %0, c13, c0, 0" : "=r"(v->fcseidr));
214 asm volatile ("mrc p15, 0, %0, c13, c0, 1" : "=r"(v->contextidr));
215 asm volatile ("mrc p15, 0, %0, c13, c0, 4" : "=r"(v->tpidrprw));
// Save the banked SP/LR/SPSR of one processor mode `m`.
217 #define SAVE_GP_MODE(m) \
218 asm volatile ("mrs %0, SP_"#m : "=r"(v->m.sp)); \
219 asm volatile ("mrs %0, LR_"#m : "=r"(v->m.lr)); \
220 asm volatile ("mrs %0, SPSR_"#m : "=r"(v->m.spsr))
// FIQ mode additionally banks R8-R12.
226 asm volatile ("mrs %0, R8_fiq" : "=r"(v->fiq_r8));
227 asm volatile ("mrs %0, R9_fiq" : "=r"(v->fiq_r9));
228 asm volatile ("mrs %0, R10_fiq" : "=r"(v->fiq_r10));
229 asm volatile ("mrs %0, R11_fiq" : "=r"(v->fiq_r11));
230 asm volatile ("mrs %0, R12_fiq" : "=r"(v->fiq_r12));
// Virtual timer state: compare value, virtual offset, kernel control.
234 asm volatile ("mrrc p15, 3, %Q0, %R0, c14" : "=r" (v->cntv_cval));
235 asm volatile ("mrrc p15, 4, %Q0, %R0, c14" : "=r" (v->cntvoff));
236 asm volatile ("mrc p15, 0, %0, c14, c1, 0" : "=r" (v->cntkctl));
// CNTV_CTL is only guest-owned while the vCPU runs in user mode.
237 if ((_state & Thread_vcpu_user))
238 asm volatile ("mrc p15, 0, %0, c14, c3, 1" : "=r" (v->cntv_ctl));
// Save the virtual GIC interface only when it was active for the guest.
240 if ((_state & Thread_vcpu_user) && Gic_h::gic->hcr().en())
243 v->gic.vmcr = Gic_h::gic->vmcr();
244 v->gic.misr = Gic_h::gic->misr();
// EISR/ELSR are bitmaps, one bit per list register.
246 for (unsigned i = 0; i < ((Vm_state::Gic::N_lregs + 31) / 32); ++i)
247 v->gic.eisr[i] = Gic_h::gic->eisr(i);
249 for (unsigned i = 0; i < ((Vm_state::Gic::N_lregs + 31) / 32); ++i)
250 v->gic.elsr[i] = Gic_h::gic->elsr(i);
252 v->gic.apr = Gic_h::gic->apr();
254 for (unsigned i = 0; i < Vm_state::Gic::N_lregs; ++i)
255 v->gic.lr[i] = Gic_h::gic->lr(i);
// ---- Load the incoming extended-vCPU state from its Vm_state ----------
259 if (_to_state & Thread_ext_vcpu_enabled)
261 Vm_state const *v = vm_state(t->vcpu_state().access());
// Never allow the guest to clear the bits the hypervisor depends on.
262 Unsigned32 hcr = access_once(&v->hcr) | Cpu::Hcr_must_set_bits;
263 asm volatile ("mcr p15, 4, %0, c1, c1, 0" : : "r"(hcr));
264 asm volatile ("mcr p15, 2, %0, c0, c0, 0" : : "r"(v->csselr));
266 Unsigned32 sctlr = access_once(&v->sctlr);
// With TGE (trap general exceptions) set the guest's stage-1 MMU must
// stay off.
267 if (hcr & Cpu::Hcr_tge)
268 sctlr &= ~Cpu::Cp15_c1_mmu;
270 asm volatile ("mcr p15, 0, %0, c1, c0, 0" : : "r"(sctlr));
271 // we unconditionally trap actlr accesses
272 // asm ("mcr p15, 0, %0, c1, c0, 1" : : "r"(v->actlr));
273 asm volatile ("mcr p15, 0, %0, c1, c0, 2" : : "r"(v->cpacr));
// Restore translation, domain, fault, attribute, vector and ID
// registers — mirror image of the save sequence above.
275 asm volatile ("mcrr p15, 0, %Q0, %R0, c2" : : "r"(v->ttbr0));
276 asm volatile ("mcrr p15, 1, %Q0, %R0, c2" : : "r"(v->ttbr1));
277 asm volatile ("mcr p15, 0, %0, c2, c0, 2" : : "r"(v->ttbcr));
279 asm volatile ("mcr p15, 0, %0, c3, c0, 0" : : "r"(v->dacr));
281 asm volatile ("mcr p15, 0, %0, c5, c0, 0" : : "r"(v->dfsr));
282 asm volatile ("mcr p15, 0, %0, c5, c0, 1" : : "r"(v->ifsr));
283 asm volatile ("mcr p15, 0, %0, c5, c1, 0" : : "r"(v->adfsr));
284 asm volatile ("mcr p15, 0, %0, c5, c1, 1" : : "r"(v->aifsr));
286 asm volatile ("mcr p15, 0, %0, c6, c0, 0" : : "r"(v->dfar));
287 asm volatile ("mcr p15, 0, %0, c6, c0, 2" : : "r"(v->ifar));
289 asm volatile ("mcrr p15, 0, %Q0, %R0, c7" : : "r"(v->par));
291 asm volatile ("mcr p15, 0, %0, c10, c2, 0" : : "r"(v->mair0));
292 asm volatile ("mcr p15, 0, %0, c10, c2, 1" : : "r"(v->mair1));
294 asm volatile ("mcr p15, 0, %0, c10, c3, 0" : : "r"(v->amair0));
295 asm volatile ("mcr p15, 0, %0, c10, c3, 1" : : "r"(v->amair1));
297 asm volatile ("mcr p15, 0, %0, c12, c0, 0" : : "r"(v->vbar));
299 asm volatile ("mcr p15, 0, %0, c13, c0, 0" : : "r"(v->fcseidr));
300 asm volatile ("mcr p15, 0, %0, c13, c0, 1" : : "r"(v->contextidr));
301 asm volatile ("mcr p15, 0, %0, c13, c0, 4" : : "r"(v->tpidrprw));
// Restore the banked SP/LR/SPSR of one processor mode `m`.
303 #define LOAD_GP_MODE(m) \
304 asm volatile ("msr SP_"#m ", %0" : : "r"(v->m.sp)); \
305 asm volatile ("msr LR_"#m ", %0" : : "r"(v->m.lr)); \
306 asm volatile ("msr SPSR_"#m ", %0" : : "r"(v->m.spsr))
// FIQ-banked R8-R12.
312 asm volatile ("msr R8_fiq, %0" : : "r"(v->fiq_r8));
313 asm volatile ("msr R9_fiq, %0" : : "r"(v->fiq_r9));
314 asm volatile ("msr R10_fiq, %0" : : "r"(v->fiq_r10));
315 asm volatile ("msr R11_fiq, %0" : : "r"(v->fiq_r11));
316 asm volatile ("msr R12_fiq, %0" : : "r"(v->fiq_r12));
// Virtual timer restore.
320 asm volatile ("mcrr p15, 3, %Q0, %R0, c14" : : "r" (v->cntv_cval));
321 asm volatile ("mcrr p15, 4, %Q0, %R0, c14" : : "r" (v->cntvoff));
322 asm volatile ("mcr p15, 0, %0, c14, c1, 0" : : "r" (v->cntkctl));
// CNTV_CTL only while the vCPU runs in user mode; otherwise keep the
// virtual timer disabled (write 0).
324 if ((_to_state & Thread_vcpu_user))
325 asm volatile ("mcr p15, 0, %0, c14, c3, 1" : : "r" (v->cntv_ctl));
327 asm volatile ("mcr p15, 0, %0, c14, c3, 1" : : "r"(0));
// Re-enable the virtual GIC interface only if the incoming guest had it
// enabled; otherwise switch it off.
329 if ((_to_state & Thread_vcpu_user) && v->gic.hcr.en())
331 Gic_h::gic->vmcr(v->gic.vmcr);
332 Gic_h::gic->apr(v->gic.apr);
333 for (unsigned i = 0; i < Vm_state::Gic::N_lregs; ++i)
334 Gic_h::gic->lr(i, v->gic.lr[i]);
335 Gic_h::gic->hcr(v->gic.hcr);
338 Gic_h::gic->hcr(Gic_h::Hcr(0));
// ---- Destination is not an extended vCPU: reset to kernel defaults ----
// HCR: trap general exceptions + default cacheable, plus must-set bits.
342 asm volatile ("mcr p15, 4, %0, c1, c1, 0"
343 : : "r"(Cpu::Hcr_tge | Cpu::Hcr_dc | Cpu::Hcr_must_set_bits));
344 // load normal SCTLR ...
345 asm volatile ("mcr p15, 0, %0, c1, c0, 0"
346 : : "r" ((Cpu::Cp15_c1_generic | Cpu::Cp15_c1_cache_bits) & ~Cpu::Cp15_c1_mmu));
// CPACR 0xf00000: presumably full access to cp10/cp11 (FP/SIMD) —
// confirm against the ARM ARM CPACR bit layout.
347 asm volatile ("mcr p15, 0, %0, c1, c0, 2" : : "r" (0xf00000));
// Clear FCSEIDR and CONTEXTIDR, disable the virtual timer and the GIC
// virtual interface.
348 asm volatile ("mcr p15, 0, %0, c13, c0, 0" : : "r" (0));
349 asm volatile ("mcr p15, 0, %0, c13, c0, 1" : : "r" (0));
350 asm volatile ("mcr p15, 0, %0, c14, c3, 1" : : "r" (0)); // disable VTIMER
352 Gic_h::gic->hcr(Gic_h::Hcr(0));
// Transition a vCPU from guest/user execution back to its kernel (host)
// state.  With do_load the hardware registers are switched; without it
// only the in-memory Vm_state copies are shuffled (the state is not live
// on this CPU).  NOTE(review): excerpted listing — braces, the do_load
// branch headers and some lines are elided.
356 IMPLEMENT_OVERRIDE inline NEEDS[Context::vm_state]
358 Context::arch_load_vcpu_kern_state(Vcpu_state *vcpu, bool do_load)
// Plain (non-extended) vCPU: only the user-RO thread ID changes.
360 if (!(state() & Thread_ext_vcpu_enabled))
362 _tpidruro = vcpu->host_tpidruro;
368 Vm_state *v = vm_state(vcpu);
// Read the live HCR (do_load path) or the saved copy; remember it as
// the guest's HCR before switching to host configuration.
371 asm volatile ("mrc p15, 4, %0, c1, c1, 0" : "=r"(hcr));
373 hcr = access_once(&v->hcr);
375 v->guest_regs.hcr = hcr;
// TGE clear means the whole VM runs privileged (full guest OS).
376 bool const all_priv_vm = !(hcr & Cpu::Hcr_tge);
379 // save guest state, load full host state
// Guest TPIDRURO, SCTLR, FCSEIDR, CONTEXTIDR into guest_regs.
382 asm volatile ("mrc p15, 0, %0, c13, c0, 3"
383 : "=r"(vcpu->user_tpidruro));
384 asm volatile ("mrc p15, 0, %0, c1, c0, 0"
385 : "=r"(v->guest_regs.sctlr));
386 asm volatile ("mrc p15, 0, %0, c13, c0, 0"
387 : "=r"(v->guest_regs.fcseidr));
388 asm volatile ("mrc p15, 0, %0, c13, c0, 1"
389 : "=r"(v->guest_regs.contextidr));
// Guest svc-mode banked registers.
391 asm volatile ("mrs %0, SP_svc" : "=r"(v->guest_regs.svc.sp));
392 asm volatile ("mrs %0, LR_svc" : "=r"(v->guest_regs.svc.lr));
393 asm volatile ("mrs %0, SPSR_svc" : "=r"(v->guest_regs.svc.spsr));
395 // fcse not supported in vmm
396 asm volatile ("mcr p15, 0, %0, c13, c0, 0" : : "r"(0));
// Host CONTEXTIDR and svc-mode banked registers.
397 asm volatile ("mcr p15, 0, %0, c13, c0, 1"
398 : : "r"(v->host_regs.contextidr));
400 asm volatile ("msr SP_svc, %0" : : "r"(v->host_regs.svc.sp));
401 asm volatile ("msr LR_svc, %0" : : "r"(v->host_regs.svc.lr));
402 asm volatile ("msr SPSR_svc, %0" : : "r"(v->host_regs.svc.spsr));
// Save guest CNTV_CTL, then disable the virtual timer for the host.
404 asm volatile ("mrc p15, 0, %0, c14, c3, 1" : "=r" (v->cntv_ctl));
406 asm volatile ("mcr p15, 0, %0, c14, c3, 1" : : "r"(0));
// If the virtual GIC interface was enabled, save it and switch it off
// (the host does not use it).
408 Gic_h::Hcr ghcr = Gic_h::gic->hcr();
412 v->gic.vmcr = Gic_h::gic->vmcr();
413 v->gic.misr = Gic_h::gic->misr();
414 for (unsigned i = 0; i < ((Vm_state::Gic::N_lregs + 31) / 32); ++i)
415 v->gic.eisr[i] = Gic_h::gic->eisr(i);
416 for (unsigned i = 0; i < ((Vm_state::Gic::N_lregs + 31) / 32); ++i)
417 v->gic.elsr[i] = Gic_h::gic->elsr(i);
418 v->gic.apr = Gic_h::gic->apr();
419 for (unsigned i = 0; i < Vm_state::Gic::N_lregs; ++i)
420 v->gic.lr[i] = Gic_h::gic->lr(i);
421 Gic_h::gic->hcr(Gic_h::Hcr(0));
// Not loaded on this CPU: perform the same guest->host swap purely on
// the in-memory Vm_state copies.
426 vcpu->user_tpidruro = _tpidruro;
427 v->guest_regs.sctlr = v->sctlr;
428 v->guest_regs.fcseidr = v->fcseidr;
429 v->guest_regs.contextidr = v->contextidr;
430 v->guest_regs.svc = v->svc;
433 v->contextidr = v->host_regs.contextidr;
434 v->svc = v->host_regs.svc;
438 _tpidruro = vcpu->host_tpidruro;
// Load-path epilogue: host TPIDRURO, kernel-default HCR (must-set | DC)
// and the host SCTLR go live.
441 asm volatile ("mcr p15, 0, %0, c13, c0, 3" : : "r"(vcpu->host_tpidruro));
442 asm volatile ("mcr p15, 4, %0, c1, c1, 0" : : "r"(Cpu::Hcr_must_set_bits | Cpu::Hcr_dc));
443 asm volatile ("mcr p15, 0, %0, c1, c0, 0" : : "r"(v->host_regs.sctlr));
// In-memory variant of the same epilogue.
447 v->hcr = Cpu::Hcr_must_set_bits | Cpu::Hcr_dc;
448 v->sctlr = v->host_regs.sctlr;
// Transition a vCPU from its kernel (host) state to guest/user execution
// — the inverse of arch_load_vcpu_kern_state.  With do_load the hardware
// registers are switched; otherwise only the in-memory Vm_state copies.
// NOTE(review): excerpted listing — braces and branch headers are elided;
// the function's end lies beyond this view.
452 IMPLEMENT_OVERRIDE inline NEEDS[Context::vm_state]
454 Context::arch_load_vcpu_user_state(Vcpu_state *vcpu, bool do_load)
// Plain (non-extended) vCPU: only the user-RO thread ID changes.
457 if (!(state() & Thread_ext_vcpu_enabled))
459 _tpidruro = vcpu->user_tpidruro;
465 Vm_state *v = vm_state(vcpu);
// Use the guest's saved HCR, but never drop the hypervisor's must-set
// bits.  TGE clear => fully privileged VM.
466 Unsigned32 hcr = access_once(&v->guest_regs.hcr) | Cpu::Hcr_must_set_bits;
467 bool const all_priv_vm = !(hcr & Cpu::Hcr_tge);
// Save host CONTEXTIDR and svc-mode banked registers ...
473 asm volatile ("mrc p15, 0, %0, c13, c0, 1"
474 : "=r"(v->host_regs.contextidr));
476 asm volatile ("mrs %0, SP_svc" : "=r"(v->host_regs.svc.sp));
477 asm volatile ("mrs %0, LR_svc" : "=r"(v->host_regs.svc.lr));
478 asm volatile ("mrs %0, SPSR_svc" : "=r"(v->host_regs.svc.spsr));
// ... then load the guest's FCSEIDR, CONTEXTIDR and svc-mode registers.
480 asm volatile ("mcr p15, 0, %0, c13, c0, 0"
481 : : "r"(v->guest_regs.fcseidr));
482 asm volatile ("mcr p15, 0, %0, c13, c0, 1"
483 : : "r"(v->guest_regs.contextidr));
485 asm volatile ("msr SP_svc, %0" : : "r"(v->guest_regs.svc.sp));
486 asm volatile ("msr LR_svc, %0" : : "r"(v->guest_regs.svc.lr));
487 asm volatile ("msr SPSR_svc, %0" : : "r"(v->guest_regs.svc.spsr));
// Re-arm the guest's virtual timer control.
489 asm volatile ("mcr p15, 0, %0, c14, c3, 1" : : "r" (v->cntv_ctl));
// Restore the virtual GIC interface state for the guest.
493 Gic_h::gic->vmcr(v->gic.vmcr);
494 Gic_h::gic->apr(v->gic.apr);
495 for (unsigned i = 0; i < Vm_state::Gic::N_lregs; ++i)
496 Gic_h::gic->lr(i, v->gic.lr[i]);
498 Gic_h::gic->hcr(v->gic.hcr);
// Not loaded on this CPU: swap host<->guest copies in memory only.
502 v->host_regs.svc = v->svc;
504 v->fcseidr = v->guest_regs.fcseidr;
505 v->contextidr = v->guest_regs.contextidr;
506 v->svc = v->guest_regs.svc;
// Load-path epilogue: remember host TPIDRURO/SCTLR, then install the
// guest HCR and a sanitized guest SCTLR (MMU forced off under TGE) and
// the guest's TPIDRURO.
512 asm volatile ("mrc p15, 0, %0, c13, c0, 3" : "=r"(vcpu->host_tpidruro));
513 asm volatile ("mrc p15, 0, %0, c1, c0, 0" : "=r"(v->host_regs.sctlr));
515 asm volatile ("mcr p15, 4, %0, c1, c1, 0" : : "r"(hcr));
516 Unsigned32 sctlr = access_once(&v->guest_regs.sctlr);
517 if (hcr & Cpu::Hcr_tge)
518 sctlr &= ~Cpu::Cp15_c1_mmu;
519 asm volatile ("mcr p15, 0, %0, c1, c0, 0" : : "r"(sctlr));
520 asm volatile ("mcr p15, 0, %0, c13, c0, 3" : : "r"(vcpu->user_tpidruro));
521 _tpidruro = vcpu->user_tpidruro;
// In-memory variant of the same epilogue.
525 vcpu->host_tpidruro = _tpidruro;
526 _tpidruro = vcpu->user_tpidruro;
527 v->host_regs.sctlr = v->sctlr;
529 v->sctlr = v->guest_regs.sctlr;