sub lr, lr, #\adjust
.endif
#ifdef CONFIG_ARM_V6PLUS
- clrex
+#ifdef CONFIG_ARM_1136
// TODO: emulate clrex using a dummy strex on CPUs that lack clrex (e.g. ARM1136)
+#else
+ clrex
+#endif
#endif
.if \atomic_fixup
atomic_fixup lr 0
ldrne lr, [sp, #RF(SVC_LR, -RF_SIZE)] @ load old kernel lr
rfedb sp
#else
- msr spsr, lr @ Load SPSR from kernel_lr
+ msr spsr_cfsx, lr @ Load SPSR from kernel_lr
ldr lr, [sp, #RF(PC, -RF_SIZE)] @ copy PC on psr field for
str lr, [sp, #RF(PSR, -RF_SIZE)] @ final ldmdb and proper ksp
ldrne lr, [sp, #RF(SVC_LR, -RF_SIZE)] @ load old kernel lr
/* Return */
ldr lr, [sp, #RF(PSR,0)]
- msr spsr, lr
+ msr spsr_cfsx, lr
@ ldmia sp, {sp,lr}^ @ done lazy
add sp, sp, #RF_SIZE
ldr lr, [sp, #RF(PC, -RF_SIZE)]
mrc p15, 0, r1, c5, c0, 1 @ Load IFSR into r1
bic r1, r1, #0x00ff0000
orr r1, r1, #0x00330000 @ Set read bit and prefetch abort
- ldr r0, [sp, #RF(PC, 5*4)] @ get PC from RF and use as pfa
+#if defined(CONFIG_ARM_V6PLUS) && !defined(CONFIG_ARM_1136)
+ mrc p15, 0, r0, c6, c0, 2 @ Read fault address, for T2: pfa != pc
+#else
+ ldr r0, [sp, #RF(PC, 5*4)] @ Get PC from RF and use as pfa
+#endif
mov r2, r0
add r3, sp, #(5*4)
stmdb sp!, {r0, r1}
.word sys_kdb_ke
.word sys_kdb_ke
/*SYSCALL(ipc)*/
- .word ipc_short_cut_wrapper
- .word sys_arm_cache_op
+ .word sys_ipc_wrapper
+ .word sys_arm_mem_op
SYSCALL(invoke_debug)
.word sys_kdb_ke
.word sys_kdb_ke
/* restore original IP */
CONTEXT_OF r1, sp
- ldr r2, [r1, #(OFS__THREAD__VCPU_STATE)]
+ /* access_vcpu() for the local case */
+ ldr r2, [r1, #(OFS__THREAD__USER_VCPU)]
add r2, r2, #(VAL__SIZEOF_TRAP_STATE - RF_SIZE)
ldr r0, [r1, #(OFS__THREAD__EXCEPTION_IP)]
ldr r0, [r2, #(-8 + OFS__VCPU_STATE__ENTRY_IP)]
str r0, [sp, #RF(PC, 0)]
+ add r0, r2, #(-8)
- b __iret
+ b __iret
kernel_prefetch_abort_label: .string "Kernel prefetch abort"
kdebug entry
**********************************************************************/
-.macro DEBUGGER_ENTRY errorcode
+.macro DEBUGGER_ENTRY type
#ifdef CONFIG_JDB
str sp, [sp, #(RF(USR_SP, -RF_SIZE))] @ save r[13]
sub sp, sp, #(RF_SIZE)
stmdb sp!, {r0 - r12}
mov r0, #-1 @ pfa
- mov r1, #\errorcode @ err
+ mov r1, #0x00e00000 @ err
+ orr r1, #\type @ + type
stmdb sp!, {r0, r1}
mov r0, sp
.global kern_kdebug_entry
.align 4
kern_kdebug_entry:
- DEBUGGER_ENTRY 0x00e00000
+ DEBUGGER_ENTRY 0
+
+ .global kern_kdebug_sequence_entry
+ .align 4
+kern_kdebug_sequence_entry:
+ DEBUGGER_ENTRY 1
#ifdef CONFIG_MP
.global kern_kdebug_ipi_entry
.align 4
kern_kdebug_ipi_entry:
- DEBUGGER_ENTRY 0x00f00000
+ DEBUGGER_ENTRY 2
.previous
#endif
mrc p15, 0, r1, c13, c0, 1 @ read CP15_CID
stmia r0!, {r1}
+ // TLS registers (CP15 c13, Thread ID regs) are banked; save them explicitly
+ mrc p15, 0, r1, c13, c0, 2 @ read CP15_TLS1
+ stmia r0!, {r1}
+
+ mrc p15, 0, r1, c13, c0, 3 @ read CP15_TLS2
+ stmia r0!, {r1}
+
+ mrc p15, 0, r1, c13, c0, 4 @ read CP15_TLS3
+ stmia r0!, {r1}
+
+ mrc p10, 7, r1, cr8, cr0, 0 @ fpexc
+ stmia r0!, {r1}
+
// switch to secure world
mov r1, #0
mcr p15, 0, r1, c1, c1, 0
and r1, r1, #0x1c0
mcr p15, 0, r1, c12, c1, 1
-#if 0
+#if 1
// switch to non-secure world
mov r1, #1
mcr p15, 0, r1, c1, c1, 0
ldmia r0!, {r1}
mcr p15, 0, r1, c13, c0, 1 @ write CP15_CID
+ // TLS registers (CP15 c13, Thread ID regs) are banked; restore them explicitly
+ ldmia r0!, {r1}
+ mcr p15, 0, r1, c13, c0, 2 @ write CP15_TLS1
+
+ ldmia r0!, {r1}
+ mcr p15, 0, r1, c13, c0, 3 @ write CP15_TLS2
+
+ ldmia r0!, {r1}
+ mcr p15, 0, r1, c13, c0, 4 @ write CP15_TLS3
+
+ ldmia r0!, {r1}
+ mcr p10, 7, r1, cr8, cr0, 0 @ fpexc
+
// switch to secure world
mov r1, #0
mcr p15, 0, r1, c1, c1, 0
ISB_OP r1
-
- xxx
#endif
// load gen-regs