// vim:se ft=asms:
#pragma once

#include "globalconfig.h"
#include "config_tcbsize.h"
#include "tcboffset.h"

/****************************
 * some handy definitions
 */
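/*
 * Layout of the 20-byte return frame (RF), offsets from its start:
 *   0: user SP, 4: user LR, 8: SVC LR, 12: PC, 16: PSR.
 * RF(reg, offs) yields the offset of register \reg in a frame that
 * starts offs bytes above the given base address.
 */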
#define RF_SIZE      20
#define RF_PSR       16
#define RF_PC        12
#define RF_SVC_LR     8
#define RF_USR_LR     4
#define RF_USR_SP     0
#define RF(reg, offs) (RF_##reg + (offs))

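/* Place an exception class value in the EC field (bits [31:26]) of an HSR-style error word. */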
#define GET_HSR(ec) ((ec) << 26)

/**********************************************************************
 * calculate the TCB address from a stack pointer
 */
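/*
 * THREAD_BLOCK_SIZE is a power of two; the mask is applied with two
 * bic instructions because an ARM data-processing immediate encodes
 * only 8 significant bits.
 */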
.macro CONTEXT_OF reg, ptr
        bic     \reg, \ptr, #((THREAD_BLOCK_SIZE-1) & 0xff)
        bic     \reg, \reg, #((THREAD_BLOCK_SIZE-1) & 0xff00)
.endm

/**********************************************************************
 * Reset the thread cancel flag.
 * Register r0 is scratched and contains the thread state afterwards.
 */
.macro RESET_THREAD_CANCEL_AT tcb
        ldr     r0, [\tcb, #(OFS__THREAD__STATE)]
        bic     r0, r0, #VAL__Thread_cancel
        str     r0, [\tcb, #(OFS__THREAD__STATE)]
.endm


/*****************************************************************************/
/* The syscall table stuff                                                   */
/*****************************************************************************/
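/*
 * One word per entry; slots that do not correspond to a real system
 * call point at sys_kdb_ke and thus end up in the kernel debugger.
 */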
.macro GEN_SYSCALL_TABLE
.align 4
.global sys_call_table
sys_call_table:
        .word sys_kdb_ke
        .word sys_kdb_ke
        .word sys_ipc_wrapper
        .word sys_arm_mem_op
        .word sys_invoke_debug_wrapper
        .word sys_kdb_ke
        .word sys_kdb_ke
        .word sys_kdb_ke
        .word sys_kdb_ke
        .word sys_kdb_ke
        .word sys_kdb_ke
.endm

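/*
 * Emit the leave_by_vcpu_upcall entry point.
 * Parameters (roles as inferred from the code below):
 *   THREAD_VCPU    - offset of the (kernel) vCPU state pointer in the TCB
 *   LOAD_USR_SP    - macro that sets up the entry stack pointer from the
 *                    vCPU state (argument: vCPU state pointer)
 *   LOAD_USR_VCPU  - macro that puts the vCPU state pointer handed to the
 *                    entry handler into r0 (args: dest, vCPU state, thread)
 *   USR_ONLY       - if 0, the PSR used for the upcall return frame is
 *                    forced to SVC mode for extended-vCPU threads
 */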
.macro GEN_VCPU_UPCALL THREAD_VCPU, LOAD_USR_SP, LOAD_USR_VCPU, USR_ONLY
.align 4
.global leave_by_vcpu_upcall

leave_by_vcpu_upcall:
        sub     sp, sp, #(RF_SIZE + 3*4)   @ restore old return frame
        /* save r0, r1, r2, which we use as scratch registers */
        stmia   sp, {r0 - r2}

        /* restore original IP */
        CONTEXT_OF r1, sp
        ldr     r2, [r1, #(\THREAD_VCPU)]
        add     r2, r2, #(VAL__SIZEOF_TRAP_STATE - RF_SIZE)

        /* r1 = current() */
        /* r2 = &vcpu_state->ts.r[13] */

        ldr     r0, [r1, #(OFS__THREAD__EXCEPTION_IP)]
        str     r0, [r2, #RF(PC, 0)]
        .if ! \USR_ONLY
          ldr     r0, [r1, #(OFS__THREAD__STATE)]
          tst     r0, #VAL__Thread_ext_vcpu_enabled
        .endif
        ldr     r0, [r1, #(OFS__THREAD__EXCEPTION_PSR)]
        str     r0, [r2, #RF(PSR, 0)]
        bic     r0, #0x20 // force ARM mode
        .if ! \USR_ONLY
          bicne   r0, #0xf
          orrne   r0, #0x13
        .endif
        str     r0, [sp, #RF(PSR, 3*4)]

        ldr     r0, [sp, #RF(USR_LR, 3*4)]
        str     r0, [r2, #RF(USR_LR, 0)]

        ldr     r0, [sp, #RF(USR_SP, 3*4)]
        str     r0, [r2, #RF(USR_SP, 0)]

        mov     r0, #~0
        str     r0, [r1, #(OFS__THREAD__EXCEPTION_IP)]

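        /* stmdb without writeback: r3-r12 go to r2-40 .. r2-4, i.e. the
           r[3]..r[12] slots of the trap state */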
        stmdb   r2, {r3-r12}

        /* copy the scratch registers saved previously into the trap state */
        ldr     r0, [sp, #8]
        str     r0, [r2, #-44]

        ldr     r0, [sp, #4]
        str     r0, [r2, #-48]

        ldr     r0, [sp]
        str     r0, [r2, #-52]
        sub     r2, r2, #64     @ now r2 points to the VCPU STATE again

        add     sp, sp, #(3*4)

        \LOAD_USR_SP r2

        ldr     r0, [r2, #(OFS__VCPU_STATE__ENTRY_IP)]

        str     r0, [sp, #RF(PC, 0)]
        \LOAD_USR_VCPU r0, r2, r1
        b       __iret

.endm

/**********************************************************************
        kdebug entry
 **********************************************************************/
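/*
 * Build a trap state on the current stack and call the nested trap
 * handler (call_nested_trap_handler); \type is OR-ed into the error
 * code and distinguishes the entry points generated by
 * GEN_DEBUGGER_ENTRIES. Without CONFIG_JDB this is a plain return.
 */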
.macro DEBUGGER_ENTRY type
#ifdef CONFIG_JDB
        str     sp, [sp, #(RF(USR_SP, -RF_SIZE))] @ save r[13]
        sub     sp, sp, #(RF_SIZE)

        str     lr, [sp, #RF(SVC_LR, 0)]
        str     lr, [sp, #RF(PC, 0)]
        mrs     lr, cpsr
        str     lr, [sp, #RF(PSR, 0)]

        stmdb   sp!, {r0 - r12}
        sub     sp, sp, #4              @ reserve the tpidruro slot
        mov     r0, #-1                 @ pfa
        mov     r1, #GET_HSR(0x33)      @ err
        orr     r1, #\type              @ + type
        stmdb   sp!, {r0, r1}

        mov     r0, sp
        adr     lr, 1f
        ldr     pc, 3f

1:
        add     sp, sp, #12             @ pfa, err and tpidruro
        ldmia   sp!, {r0 - r12}
        ldr     lr, [sp, #RF(PSR, 0)]
        msr     cpsr, lr
        ldr     lr, [sp, #RF(SVC_LR, 0)]

        ldr     sp, [sp, #(RF(USR_SP, 0))]
        mov     pc, lr


3:      .word call_nested_trap_handler
#else
        mov     pc, lr
#endif
.endm


.macro GEN_DEBUGGER_ENTRIES
        .global kern_kdebug_entry
        .align 4
kern_kdebug_entry:
        DEBUGGER_ENTRY 0

        .global kern_kdebug_sequence_entry
        .align 4
kern_kdebug_sequence_entry:
        DEBUGGER_ENTRY 1


#ifdef CONFIG_MP
        .section ".text"
        .global kern_kdebug_ipi_entry
        .align 4
kern_kdebug_ipi_entry:
        DEBUGGER_ENTRY 2
        .previous
#endif

.endm
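/*
 * Align the stack pointer down to 8 bytes (as the AAPCS expects before
 * calling into C) and store the original value at the new top so that
 * exception_return can reload it with "ldr sp, [sp]".
 */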
.macro align_and_save_sp orig_sp
        mov     \orig_sp, sp
        tst     sp, #4
        subeq   sp, sp, #8
        subne   sp, sp, #4
        str     \orig_sp, [sp]
.endm
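/*
 * Push r0 and the error code (optionally OR-ed with \ec2), align the
 * stack for the C call, and enter the slowtrap path with the return
 * address set to exception_return.
 */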
.macro enter_slowtrap_w_stack errorcode, ec2 = 0
        mov     r1, #\errorcode
        .if \ec2 != 0
           orr r1, r1, #\ec2
        .endif
        stmdb   sp!, {r0, r1}
        align_and_save_sp r0
        adr     lr, exception_return
        ldr     pc, .LCslowtrap_entry
.endm
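/*
 * Return path used after a C-level trap handler: disable interrupts,
 * reload the pre-alignment stack pointer saved by align_and_save_sp,
 * drop pfa/err/tpidruro, restore r0-r12 and return to the interrupted
 * context.
 */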
.macro GEN_EXCEPTION_RETURN
        .global __return_from_user_invoke
exception_return:
        disable_irqs
        ldr     sp, [sp]
__return_from_user_invoke:
        add     sp, sp, #12 // pfa, err & tpidruro
        ldmia   sp!, {r0 - r12}
        return_from_exception
.endm

.macro GEN_IRET
        .global __iret
__iret:
        return_from_exception
.endm
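/*
 * Rebuild the return frame and the r0-r12 trap state, patch PC and PSR
 * from the thread's saved exception state, and enter the slowtrap path
 * with error code GET_HSR(0x3e).
 */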
.macro GEN_LEAVE_BY_TRIGGER_EXCEPTION
.align 4
.global leave_by_trigger_exception

leave_by_trigger_exception:
        sub     sp, sp, #RF_SIZE   @ restore old return frame
        stmdb   sp!, {r0 - r12}

        sub     sp, sp, #4              @ reserve the tpidruro slot

        /* restore original IP */
        CONTEXT_OF r1, sp
        ldr     r0, [r1, #(OFS__THREAD__EXCEPTION_IP)]
        str     r0, [sp, #RF(PC, 14*4)]

        ldr     r0, [r1, #(OFS__THREAD__EXCEPTION_PSR)]
        str     r0, [sp, #RF(PSR, 14*4)]

        mov     r0, #~0
        str     r0, [r1, #(OFS__THREAD__EXCEPTION_IP)]

        enter_slowtrap_w_stack GET_HSR(0x3e)
.endm