1 IMPLEMENTATION [arm && hyp]:
// Initialize the architecture-specific (ARM/hyp) part of a vCPU state
// when a thread switches into extended-vCPU operation.
// NOTE(review): this listing is elided — the leading per-line numbers skip
// ranges, so braces, early returns and some statements between the visible
// lines are not shown here.
5 Thread::arch_init_vcpu_state(Vcpu_state *vcpu_state, bool ext)
// Only (re)initialize when entering extended vCPU mode for the first time;
// exact control flow of this guard is partly elided.
7 if (!ext || (state() & Thread_ext_vcpu_enabled))
10 assert (check_for_current_cpu());
12 Vm_state *v = vm_state(vcpu_state);
// Initial guest SCTLR: kernel's generic + cache bits, but with the MMU
// disabled.  NOTE(review): (1 << 28) is presumably SCTLR.TRE (TEX remap
// enable) — confirm against Cpu:: definitions elsewhere in the project.
15 v->sctlr = (Cpu::Cp15_c1_generic | Cpu::Cp15_c1_cache_bits) & ~(Cpu::Cp15_c1_mmu | (1 << 28));
// Guest starts with "trap general exceptions" set and SCTLR cleared.
24 v->guest_regs.hcr = Cpu::Hcr_tge;
25 v->guest_regs.sctlr = 0;
// Snapshot the current user return state into both the host-side and the
// active SVC banked-register images.
28 v->host_regs.svc.lr = regs()->ulr;
29 v->host_regs.svc.sp = regs()->sp();
30 v->svc.lr = regs()->ulr;
31 v->svc.sp = regs()->sp();
// Virtual GIC CPU interface starts disabled.
33 v->gic.hcr = Gic_h::Hcr(0);
// If this thread is currently executing, also push the new state into the
// live hardware registers immediately.
36 if (current() == this)
// Write HCR (cp15, op1=4, c1,c1,0).  NOTE(review): (1 << 2) looks like
// HCR.PTW — confirm bit meaning against the ARM ARM / Cpu:: constants.
38 asm volatile ("mcr p15, 4, %0, c1, c1, 0"
39 : : "r"((1 << 2) | Cpu::Hcr_dc | Cpu::Hcr_must_set_bits));
// Write SCTLR (cp15, c1,c0,0) with the freshly computed guest value.
40 asm volatile ("mcr p15, 0, %0, c1, c0, 0" : : "r"(v->sctlr));
// Load the banked SVC stack pointer / link register (virt. ext. msr forms).
41 asm volatile ("msr SP_svc, %0" : : "r"(v->host_regs.svc.sp))ES;
42 asm volatile ("msr LR_svc, %0" : : "r"(v->host_regs.svc.lr));
// Force the saved PSR mode to SVC so the vCPU resumes in guest SVC mode —
// either in the pending exception continuation or directly in the frame.
45 if (exception_triggered())
46 _exc_cont.flags(regs(), _exc_cont.flags(regs()) | Proc::PSR_m_svc);
48 regs()->psr |= Proc::PSR_m_svc;
51 extern "C" void slowtrap_entry(Trap_state *ts);
52 extern "C" Mword pagefault_entry(const Mword pfa, Mword error_code,
53 const Mword pc, Return_frame *ret_frame);
// Safely read a value of type T from user memory without taking a page
// fault: translate the address by hardware (VA-to-PA address translation
// op), and only dereference if the translation succeeded.
// NOTE(review): listing is elided — the declaration of `pa` and the
// failure-path return value are not visible here.
55 PUBLIC static inline template<typename T>
57 Thread::peek_user(T const *adr, Context *c)
// cp15 c7,c8,6 appears to be the ATS12NSOUR address-translation op
// (stages 1+2, non-secure, unprivileged read) — confirm; the result is
// then read back from PAR (c7,c4,0).
60 asm ("mcr p15, 0, %1, c7, c8, 6 \n"
61 "mrc p15, 0, %0, c7, c4, 0 \n"
62 : "=r" (pa) : "r"(adr) );
// PAR bit 0 is the fault flag; clear means the translation is valid.
63 if (EXPECT_TRUE(!(pa & 1)))
// Combine the physical page frame from PAR with the page offset of the
// original virtual address, then load through the kernel mapping.
64 return *reinterpret_cast<T const *>(cxx::mask_lsb(pa, 12)
65 | cxx::get_lsb((Address)adr, 12));
// Translation faulted: record that a kernel memory operation hit a bad
// user address (failure return value elided from this listing).
67 c->set_kernel_mem_op_hit();
// Fetch the banked link register belonging to the processor mode recorded
// in the given return frame's PSR (low 5 bits = mode field).
// NOTE(review): the case labels and the declaration of `ret` are elided
// from this listing; each visible line is the body of one switch case
// (irq/fiq/abt/svc/und, judging by the banked-register names).
72 static Mword get_lr_for_mode(Return_frame const *rf)
75 switch (rf->psr & 0x1f)
// Read the banked LR of each mode via the virtualization-extension
// `mrs <reg>, lr_<mode>` forms (only valid from hyp mode).
81 asm ("mrs %0, lr_irq" : "=r" (ret)); return ret;
83 asm ("mrs %0, lr_fiq" : "=r" (ret)); return ret;
85 asm ("mrs %0, lr_abt" : "=r" (ret)); return ret;
87 asm ("mrs %0, lr_svc" : "=r" (ret)); return ret;
89 asm ("mrs %0, lr_und" : "=r" (ret)); return ret;
// Any other mode value is a kernel bug.
91 assert(false); // wrong processor mode
// Handle a fault taken while executing in hyp mode itself (i.e. a kernel
// fault, not a guest exit): synthesize a plausible HSR exception class,
// print diagnostics, and drop into the kernel debugger.
// NOTE(review): the surrounding switch over `abort_type`, the declaration
// of `v`, and the printf argument tails are elided from this listing.
97 extern "C" void hyp_mode_fault(Mword abort_type, Trap_state *ts)
// EC 0x11 = SVC (AArch32) — used for the SWI case; 0 = unknown/undef.
104 ts->hsr().ec() = abort_type ? 0x11 : 0;
105 printf("KERNEL%d: %s fault at %lx\n",
106 cxx::int_value<Cpu_number>(current_cpu()),
107 abort_type ? "SWI" : "Undefined instruction",
// EC 0x21 = instruction abort from the same exception level; read the
// faulting address from HIFAR (cp15, op1=4, c6,c0,2).
111 ts->hsr().ec() = 0x21;
112 asm volatile("mrc p15, 4, %0, c6, c0, 2" : "=r"(v));
113 printf("KERNEL%d: Instruction abort at %lx\n",
114 cxx::int_value<Cpu_number>(current_cpu()),
// EC 0x25 = data abort from the same exception level; read the faulting
// address from HDFAR (cp15, op1=4, c6,c0,0).
118 ts->hsr().ec() = 0x25;
119 asm volatile("mrc p15, 4, %0, c6, c0, 0" : "=r"(v));
120 printf("KERNEL%d: Data abort: pc=%lx pfa=%lx\n",
121 cxx::int_value<Cpu_number>(current_cpu()),
// Fallback for abort types not matched above.
125 printf("KERNEL%d: Unknown hyp fault at %lx\n",
126 cxx::int_value<Cpu_number>(current_cpu()),
// Stop in the kernel debugger — a fault in hyp mode is always fatal here.
133 kdb_ke("In-kernel fault");
136 //-----------------------------------------------------------------------------
137 IMPLEMENTATION [arm && hyp && fpu]:
// Handle a trapped FPU/SIMD access from the guest: either the instruction
// is conditionally skipped, or the FPU is lazily switched to this thread.
// NOTE(review): listing is elided — the function header, braces and the
// return statements of the individual paths are not visible.
141 Thread::handle_fpu_trap(Trap_state *ts)
// If HSR.CV says the condition field is valid use it, otherwise assume
// 0xe (AL = always).
143 unsigned cond = ts->hsr().cv() ? ts->hsr().cond() : 0xe;
// Condition false -> the trapped instruction would not have executed;
// the skip path is elided here.
144 if (!Thread::condition_valid(cond, ts->psr))
146 // FPU insns are 32bit, even for thumb
147 assert (ts->hsr().il());
// The trap implies the FPU was disabled for this context.
152 assert (!Fpu::is_enabled());
// Lazy FPU context switch; on success the instruction is retried (elided).
154 if (current_thread()->switchin_fpu())
157 // emulate the ARM exception entry PC
// Advance past the trapped instruction: 2 bytes in Thumb, 4 in ARM state.
158 ts->pc += ts->psr & Proc::Status_thumb ? 2 : 4;
163 //-----------------------------------------------------------------------------
164 IMPLEMENTATION [arm && hyp]:
// Deliver a virtual-interrupt upcall to the vCPU's kernel-mode handler:
// switch the vCPU from user to kernel mode and report `virq` through a
// synthesized HSR in the saved vCPU state.
// NOTE(review): listing is elided (function header/braces not shown).
170 Thread::vcpu_vgic_upcall(unsigned virq)
// Preconditions: thread is an extended vCPU currently running in vCPU
// user mode with no pending exception continuation.
172 assert (state() & Thread_ext_vcpu_enabled);
173 assert (state() & Thread_vcpu_user);
174 assert (!_exc_cont.valid(regs()));
176 Vcpu_state *vcpu = vcpu_state().access();
177 assert (vcpu_exceptions_enabled(vcpu));
179 Trap_state *ts = static_cast<Trap_state *>((Return_frame *)regs());
181 // Before entering kernel mode to have original fpu state before
// (comment continuation elided) — save FPU state into the UTCB first.
183 save_fpu_state_to_utcb(ts, utcb().access());
185 check (vcpu_enter_kernel_mode(vcpu));
// Re-fetch: the accessible mapping of the vCPU state may have changed
// across the mode switch.
186 vcpu = vcpu_state().access();
// NOTE(review): 0x3d appears to be a Fiasco-defined exception class used
// to tag vGIC upcalls (not an architectural EC) — confirm; the virq number
// is passed in the HSR immediate field.
188 vcpu->_regs.s.hsr().ec() = 0x3d;
189 vcpu->_regs.s.hsr().svc_imm() = virq;
191 vcpu_save_state_and_upcall();
// Irq_base subclass that forwards a physical per-CPU interrupt (PPI) to a
// guest as a virtual interrupt number.
// NOTE(review): listing is elided — member declarations, access specifiers
// and the header of the method containing lines 206-208 (presumably a
// per-CPU alloc()/setup routine) are not visible.
195 class Arm_ppi_virt : public Irq_base
// Constructor: remember the physical and virtual irq numbers and install
// the generic handler trampoline.
199 Arm_ppi_virt(unsigned irq, unsigned virq) : _virq(virq), _irq(virq)
201 set_hit(handler_wrapper<Arm_ppi_virt>);
// (elided method) bind this object to the physical irq and unmask it.
206 printf("Allocate ARM PPI %d to virtual %d\n", _irq, _virq);
207 check (Irq_mgr::mgr->alloc(this, _irq));
208 chip()->unmask(pin());
// PPIs need no trigger-mode switching here.
212 void switch_mode(bool) {}
// Interrupt entry: inject the configured virtual irq into the currently
// running vCPU thread.  NOTE(review): the acknowledge of the upstream irq
// (`ui`) is presumably on an elided line — confirm.
218 PUBLIC inline FIASCO_FLATTEN
220 Arm_ppi_virt::handle(Upstream_irq const *ui)
222 current_thread()->vcpu_vgic_upcall(_virq);
// Irq_base subclass for the virtual timer PPI; its virtual irq number is
// fixed (1, see the handle() implementation and the printf below).
// NOTE(review): listing is elided — member declarations and the header of
// the method containing lines 237-239 are not visible.
227 class Arm_vtimer_ppi : public Irq_base
// Constructor: remember the physical irq and install the handler
// trampoline.
230 Arm_vtimer_ppi(unsigned irq) : _irq(irq)
232 set_hit(handler_wrapper<Arm_vtimer_ppi>);
// (elided method) bind this object to the physical irq and unmask it.
237 printf("Allocate ARM PPI %d to virtual %d\n", _irq, 1);
238 check (Irq_mgr::mgr->alloc(this, _irq));
239 chip()->unmask(pin());
// PPIs need no trigger-mode switching here.
243 void switch_mode(bool) {}
// Virtual-timer interrupt entry: silence the timer, then upcall the vCPU
// with virtual irq 1.
247 PUBLIC inline FIASCO_FLATTEN
249 Arm_vtimer_ppi::handle(Upstream_irq const *ui)
// Read-modify-write of CNTV_CTL (cp15, c14,c3,1).  NOTE(review): the
// modifying instruction (elided line 253) presumably sets the IMASK bit to
// mask the timer output — confirm; the declaration of `v` is also elided.
252 asm volatile("mrc p15, 0, %0, c14, c3, 1\n"
254 "mcr p15, 0, %0, c14, c3, 1\n" : "=r" (v));
255 current_thread()->vcpu_vgic_upcall(1);
// File-scope instances: physical PPI 25 -> virtual irq 0 (vGIC
// maintenance) and physical PPI 27 (virtual timer, fixed virtual irq 1).
260 static Arm_ppi_virt __vgic_irq(25, 0); // virtual GIC
261 static Arm_vtimer_ppi __vtimer_irq(27); // virtual timer
// Per-CPU construction hook that binds/unmasks the PPIs on each CPU.
// NOTE(review): the constructor header and the matching __vgic_irq.alloc()
// call are presumably on elided lines — only the vtimer call is visible.
264 struct Local_irq_init
269 __vtimer_irq.alloc();
272 DEFINE_PER_CPU_LATE static Per_cpu<Local_irq_init> local_irqs;
// True iff `pc` is one of the magic syscall-entry addresses at the very
// top of the 32-bit address space: [0xffffffd6, 0xfffffff8] (i.e.
// -0x2a .. -0x08 as Unsigned32).  The negated pc, divided by 4, later
// indexes sys_call_table (see arm_hyp_entry).
278 is_syscall_pc(Address pc)
280 return Unsigned32(-0x2a) <= pc && pc <= Unsigned32(-0x08);
// Compute the guest address to report for a stage-2 fault, combining the
// faulting virtual address (HIFAR/HDFAR) with the intermediate physical
// address (HPFAR or a fresh address translation) as appropriate.
// NOTE(review): listing is elided — braces, declarations of far/ipa/par,
// some early returns and the branch structure between the visible lines
// are not shown.
285 get_fault_ipa(Ts_error_code hsr, bool insn_abt, bool ext_vcpu)
// Faulting VA: HIFAR (c6,c0,2) for instruction aborts, HDFAR (c6,c0,0)
// for data aborts.
289 asm ("mrc p15, 4, %0, c6, c0, 2" : "=r" (far));
291 asm ("mrc p15, 4, %0, c6, c0, 0" : "=r" (far));
// Non-extended vCPUs: the VA is reported directly (return path elided).
293 if (EXPECT_TRUE(!ext_vcpu))
// Guest SCTLR (c1,c0,0): with stage-1 MMU off, VA == IPA already.
297 asm ("mrc p15, 0, %0, c1, c0, 0" : "=r" (sctlr));
298 if (!(sctlr & 1)) // stage 1 mmu disabled
// Fault during the stage-1 page-table walk: HPFAR (c6,c0,4) holds the IPA.
301 if (hsr.pf_s1ptw()) // stage 1 walk
304 asm ("mrc p15, 4, %0, c6, c0, 4" : "=r" (ipa));
// Fault status 0x0c..0x0f is a permission fault; for everything else
// HPFAR is valid and can be used directly.
308 if ((hsr.pf_fsc() & 0x3c) != 0xc) // no permission fault
311 asm ("mrc p15, 4, %0, c6, c0, 4" : "=r" (ipa));
// HPFAR holds IPA bits [39:12] in its upper bits, hence the << 8 to
// rebuild a page address, OR'ed with the page offset from the VA.
312 return (ipa << 8) | (far & 0xfff);
// Permission fault: HPFAR is not valid, so re-translate the VA by
// hardware (ATS1CPR, c7,c8,0) and read the 64-bit PAR via mrrc.
316 asm ("mcr p15, 0, %1, c7, c8, 0 \n"
317 "mrrc p15, 0, %Q0, %R0, c7 \n" : "=r"(par) : "r"(far));
320 return (par & 0xfffff000UL) | (far & 0xfff);
// Central hyp-mode trap entry: decode HSR.EC and dispatch guest exits —
// stage-2 page faults, guest syscalls (magic PCs), undefined instructions,
// FPU traps and trapped CP15 accesses.
// NOTE(review): listing is heavily elided — the switch statement, most
// case labels, braces, declarations (hsr, tmp) and several returns/breaks
// fall between the visible lines; comments below only describe what the
// visible lines demonstrate.
323 extern "C" void arm_hyp_entry(Return_frame *rf)
325 Trap_state *ts = static_cast<Trap_state*>(rf);
326 Thread *ct = current_thread();
// Read the Hyp Syndrome Register (cp15, op1=4, c5,c2,0) and record it as
// the trap's error code.
329 asm ("mrc p15, 4, %0, c5, c2, 0" : "=r" (hsr));
330 ts->error_code = hsr.raw();
333 Mword state = ct->state();
// (elided case label) instruction abort from the guest: resolve the IPA
// and try the kernel pagefault path; on failure fall through to an
// exception upcall with pf_address set.
338 tmp = get_fault_ipa(hsr, true, state & Thread_ext_vcpu_enabled);
339 if (!pagefault_entry(tmp, hsr.raw(), rf->pc, rf))
342 ts->pf_address = tmp;
// (elided case label) data abort from the guest: same flow as above with
// insn_abt = false.
348 tmp = get_fault_ipa(hsr, false, state & Thread_ext_vcpu_enabled);
349 if (!pagefault_entry(tmp, hsr.raw(), rf->pc, rf))
352 ts->pf_address = tmp;
// (elided case label, presumably SVC/HVC) a trap whose PC may be one of
// the magic syscall addresses.
360 Unsigned32 pc = rf->pc;
361 if (!is_syscall_pc(pc))
// Restore the caller's PC from the banked LR of the trapped mode and
// clear any pending cancel before dispatching.
366 rf->pc = get_lr_for_mode(rf);
367 ct->state_del(Thread_cancel);
// Alien / vCPU-user threads get an exception upcall instead of direct
// syscall dispatch (upcall itself elided).
368 if (state & (Thread_vcpu_user | Thread_alien))
370 if (state & Thread_dis_alien)
371 ct->state_del_dirty(Thread_dis_alien);
// Direct dispatch: the negated magic PC, divided by 4, indexes the
// syscall table (cf. is_syscall_pc()).
379 typedef void Syscall(void);
380 extern Syscall *sys_call_table[];
381 sys_call_table[(-pc) / 4]();
385 case 0x00: // undef opcode with HCR.TGE=1
387 ct->state_del(Thread_cancel);
388 Mword state = ct->state();
389 Unsigned32 pc = rf->pc;
// Alien / vCPU-user: skip the instruction (2 bytes Thumb, 4 ARM) and
// deliver it as an exception.
391 if (state & (Thread_vcpu_user | Thread_alien))
393 ts->pc += ts->psr & Proc::Status_thumb ? 2 : 4,
394 ct->send_exception(ts);
// Not a syscall either (pc + 4 because the undef PC points at the magic
// address minus one instruction): treat as a plain undefined instruction.
397 else if (EXPECT_FALSE(!is_syscall_pc(pc + 4)))
399 ts->pc += ts->psr & Proc::Status_thumb ? 2 : 4,
// Syscall via undef path: restore caller PC and dispatch by table index.
404 rf->pc = get_lr_for_mode(rf);
405 ct->state_del(Thread_cancel);
406 typedef void Syscall(void);
407 extern Syscall *sys_call_table[];
408 sys_call_table[-(pc + 4) / 4]();
// (elided case label, presumably CP access trap) FPU/SIMD accesses
// (coprocessors 10/11) get the lazy-FPU handler; anything else becomes an
// exception upcall.
415 if ((hsr.cpt_simd() || hsr.cpt_cpnr() == 10 || hsr.cpt_cpnr() == 11)
416 && Thread::handle_fpu_trap(ts))
419 ct->send_exception(ts);
423 case 0x03: // CP15 trapped
// Emulate one specific trapped CP15 read by returning (1 << 6) in the
// target register.  NOTE(review): which register mrc_coproc_register(0,
// 1, 0, 1) encodes, and what bit 6 means, cannot be determined from this
// listing — confirm against the Hsr definition.
424 if (hsr.mcr_coproc_register() == hsr.mrc_coproc_register(0, 1, 0, 1))
426 ts->r[hsr.mcr_rt()] = 1 << 6;
// Step over the trapped instruction: HSR.IL selects 16- vs 32-bit length.
427 ts->pc += 2 << hsr.il();
// Default: forward the trap to the thread as an exception.
433 ct->send_exception(ts);