sub lr, lr, #\adjust
.endif
#ifdef CONFIG_ARM_V6PLUS
- clrex
+#ifdef CONFIG_ARM_1136
// todo: do clrex with strex for CPUs without clrex
+#else
+ clrex
+#endif
#endif
.if \atomic_fixup
atomic_fixup lr 0
ldrne lr, [sp, #RF(SVC_LR, -RF_SIZE)] @ load old kernel lr
rfedb sp
#else
- msr spsr, lr @ Load SPSR from kernel_lr
+ msr spsr_cfsx, lr @ Load SPSR from kernel_lr
ldr lr, [sp, #RF(PC, -RF_SIZE)] @ copy PC on psr field for
str lr, [sp, #RF(PSR, -RF_SIZE)] @ final ldmdb and proper ksp
ldrne lr, [sp, #RF(SVC_LR, -RF_SIZE)] @ load old kernel lr
/* Return */
ldr lr, [sp, #RF(PSR,0)]
- msr spsr, lr
+ msr spsr_cfsx, lr
@ ldmia sp, {sp,lr}^ @ done lazy
add sp, sp, #RF_SIZE
ldr lr, [sp, #RF(PC, -RF_SIZE)]
mrc p15, 0, r1, c5, c0, 1 @ Load IFSR into r1
bic r1, r1, #0x00ff0000
orr r1, r1, #0x00330000 @ Set read bit and prefetch abort
- ldr r0, [sp, #RF(PC, 5*4)] @ get PC from RF and use as pfa
+#if defined(CONFIG_ARM_V6PLUS) && !defined(CONFIG_ARM_1136)
+ mrc p15, 0, r0, c6, c0, 2 @ Read fault address, for T2: pfa != pc
+#else
+ ldr r0, [sp, #RF(PC, 5*4)] @ Get PC from RF and use as pfa
+#endif
mov r2, r0
add r3, sp, #(5*4)
stmdb sp!, {r0, r1}
.word sys_kdb_ke
.word sys_kdb_ke
/*SYSCALL(ipc)*/
- .word ipc_short_cut_wrapper
- .word sys_arm_cache_op
+ .word sys_ipc_wrapper
+ .word sys_arm_mem_op
SYSCALL(invoke_debug)
.word sys_kdb_ke
.word sys_kdb_ke
/* restore original IP */
CONTEXT_OF r1, sp
- ldr r2, [r1, #(OFS__THREAD__VCPU_STATE)]
+ /* access_vcpu() for the local case */
+ ldr r2, [r1, #(OFS__THREAD__USER_VCPU)]
add r2, r2, #(VAL__SIZEOF_TRAP_STATE - RF_SIZE)
ldr r0, [r1, #(OFS__THREAD__EXCEPTION_IP)]
ldr r0, [r2, #(-8 + OFS__VCPU_STATE__ENTRY_IP)]
str r0, [sp, #RF(PC, 0)]
+ add r0, r2, #(-8)
- b __iret
+ b __iret
kernel_prefetch_abort_label: .string "Kernel prefetch abort"
#ifdef CONFIG_ARM_TZ
+/*
+ * ISB_OP: instruction synchronization barrier.
+ *   \reg: scratch register used as the MCR source operand on
+ *         pre-v7 cores. The CP15 ISB operation ignores the Rd
+ *         value (ARM ARM: should-be-zero), so any register is
+ *         functionally fine; we pass the caller's register
+ *         instead of silently hardcoding lr.
+ * ARMv7 provides the dedicated `isb` instruction; older
+ * architectures use the CP15 c7, c5, 4 encoding instead.
+ */
+.macro ISB_OP reg
+#ifdef CONFIG_ARM_V7
+ isb
+#else
+ mcr p15, 0, \reg, c7, c5, 4 @ cp15isb (Rd value ignored)
+#endif
+.endm
+
/**********************************************************************
* Secure and Nonsecure switching stuff
*
// switch to non-secure world
mov r1, #1
mcr p15, 0, r1, c1, c1, 0
- isb
+ ISB_OP r1
mrc p15, 0, r1, c2, c0, 0 @ read CP15_TTB0
stmia r0!, {r1}
mrc p15, 0, r1, c13, c0, 1 @ read CP15_CID
stmia r0!, {r1}
+ // tls regs are banked
+ mrc p15, 0, r1, c13, c0, 2 @ read CP15_TLS1
+ stmia r0!, {r1}
+
+ mrc p15, 0, r1, c13, c0, 3 @ read CP15_TLS2
+ stmia r0!, {r1}
+
+ mrc p15, 0, r1, c13, c0, 4 @ read CP15_TLS3
+ stmia r0!, {r1}
+
+ mrc p10, 7, r1, cr8, cr0, 0 @ fpexc
+ stmia r0!, {r1}
+
// switch to secure world
mov r1, #0
mcr p15, 0, r1, c1, c1, 0
- isb
+ ISB_OP r1
mrc p15, 0, r1, c5, c0, 0 @ read CP15_DFSR
stmia r0!, {r1}
and r1, r1, #0x1c0
mcr p15, 0, r1, c12, c1, 1
-#if 0
+#if 1
// switch to non-secure world
mov r1, #1
mcr p15, 0, r1, c1, c1, 0
- isb
+ ISB_OP r1
ldmia r0!, {r1}
mcr p15, 0, r1, c2, c0, 0 @ write CP15_TTB0
ldmia r0!, {r1}
mcr p15, 0, r1, c13, c0, 1 @ write CP15_CID
+ // tls regs are banked
+ ldmia r0!, {r1}
+ mcr p15, 0, r1, c13, c0, 2 @ write CP15_TLS1
+
+ ldmia r0!, {r1}
+ mcr p15, 0, r1, c13, c0, 3 @ write CP15_TLS2
+
+ ldmia r0!, {r1}
+ mcr p15, 0, r1, c13, c0, 4 @ write CP15_TLS3
+
+ ldmia r0!, {r1}
+ mcr p10, 7, r1, cr8, cr0, 0 @ fpexc
+
// switch to secure world
mov r1, #0
mcr p15, 0, r1, c1, c1, 0
- isb
-
- xxx
+ ISB_OP r1
#endif
// load gen-regs
+// SWITCH_TO_NONSECURE_MODE: write 0xf to CP15 c1, c1, 0 — on
+// TrustZone cores presumably the SCR (NS=1 plus exception-routing
+// bits; confirm against the SCR bit layout) — then synchronize so
+// following instructions execute in the new world. Clobbers lr.
.macro SWITCH_TO_NONSECURE_MODE
mov lr, #0xf
mcr p15, 0, lr, c1, c1, 0
- isb
+ ISB_OP lr
.endm
+// SWITCH_TO_SECURE_MODE: clear CP15 c1, c1, 0 — on TrustZone cores
+// presumably the SCR (NS=0, i.e. back to the secure world; confirm
+// against the SCR bit layout) — then synchronize so following
+// instructions execute in the secure world. Clobbers lr.
.macro SWITCH_TO_SECURE_MODE
mov lr, #0x0
mcr p15, 0, lr, c1, c1, 0
- isb
+ ISB_OP lr
.endm