/*
 * Indices of the ARM core registers inside a pt_regs-style 'uregs' array.
 * Slots 0-15 mirror r0-r15 with the usual APCS aliases (ip=r12, sp=r13,
 * lr=r14, pc=r15); slot 16 is the saved CPSR.  Slot 17 (ARM_ORIG_r0) is
 * presumably the entry-time value of r0 kept for restart purposes, as in
 * the Linux convention - TODO confirm against the trap/syscall entry code.
 * NOTE(review): the leading numerals on each line below look like original
 * line numbers fused in by an extraction tool, not real source tokens.
 */
20 #define ARM_cpsr uregs[16]
21 #define ARM_pc uregs[15]
22 #define ARM_lr uregs[14]
23 #define ARM_sp uregs[13]
24 #define ARM_ip uregs[12]
25 #define ARM_fp uregs[11]
26 #define ARM_r10 uregs[10]
27 #define ARM_r9 uregs[9]
28 #define ARM_r8 uregs[8]
29 #define ARM_r7 uregs[7]
30 #define ARM_r6 uregs[6]
31 #define ARM_r5 uregs[5]
32 #define ARM_r4 uregs[4]
33 #define ARM_r3 uregs[3]
34 #define ARM_r2 uregs[2]
35 #define ARM_r1 uregs[1]
36 #define ARM_r0 uregs[0]
37 #define ARM_ORIG_r0 uregs[17]
/*
 * Fields of 'struct undef_hook' (the 'struct undef_hook {' opener at
 * original line 39 is missing from this extract).  NOTE(review): from the
 * field names this looks like an undefined-instruction emulation hook: a
 * hook presumably matches when (instr & instr_mask) == instr_val and
 * (cpsr & cpsr_mask) == cpsr_val, and fn() then handles the instruction -
 * confirm against the undef-instruction trap handler.
 */
40 struct undef_hook *next;                                /* next hook in the registered chain */
41 unsigned long instr_mask;                               /* bits of the instruction to compare */
42 unsigned long instr_val;                                /* required value under instr_mask */
43 unsigned long cpsr_mask;                                /* bits of CPSR to compare */
44 unsigned long cpsr_val;                                 /* required value under cpsr_mask */
45 int (*fn)(struct pt_regs *regs, unsigned int instr);    /* handler; int return, semantics not visible here */
/* Register an undefined-instruction hook; return-value semantics are not
 * visible in this extract. */
48 int register_undef_hook(struct undef_hook *hook);
50 /* Low level CPU specific IRQ handling code */
/* Build-variant selector: this branch is the classic ARM-state (non-Thumb)
 * implementation; see the #elif (Cortex-M) and #else (Thumb via out-of-line
 * functions) branches further down. */
52 #if !defined(__thumb__)
53 /* Regular 32-bit ARM architecture */
/* In this configuration IRQ handlers are invoked with arguments
 * (contrast with the Cortex-M branch, which #undef's this). */
55 #define WITH_IRQ_HANDLER_ARGS
/*
 * ARM-state interrupt-control macros.  sti/cli enable/disable IRQs by
 * clearing/setting the CPSR I bit (#128 == 0x80); the fiq_* variants below
 * do the same for the F bit (#64 == 0x40), and fiq_save_and_cli masks both
 * (#192 == 0xC0).  save_and_cli / save_flags snapshot CPSR into 'flags';
 * restore_flags writes it back via 'msr cpsr_c'.  The line
 * '(void)(&temp == &flags);' is a compile-time check that 'flags' has the
 * same type as 'temp' (unsigned long).
 * NOTE(review): several '#define' opener lines, output-operand lines and
 * closing lines of these macros are missing from this extract (the fused
 * numerals show the gaps); the surviving fragments are kept verbatim.
 */
60 __asm__ __volatile__( \
61 "mrs %0, cpsr @ sti\n" \
62 " bic %0, %0, #128\n" \
72 __asm__ __volatile__( \
73 "mrs %0, cpsr @ cli\n" \
74 " orr %0, %0, #128\n" \
81 #define save_and_cli(flags) \
84 (void) (&temp == &flags); \
85 __asm__ __volatile__( \
86 "mrs %0, cpsr @ save_and_cli\n" \
87 " orr %1, %0, #128\n" \
89 : "=r" (flags), "=r" (temp) \
94 #define save_flags(flags) \
96 __asm__ __volatile__( \
97 "mrs %0, cpsr @ save_flags\n" \
103 #define restore_flags(flags) \
104 __asm__ __volatile__( \
105 "msr cpsr_c, %0 @ restore_flags\n" \
111 /* FIQ handling code */
115 unsigned long temp; \
116 __asm__ __volatile__( \
117 "mrs %0, cpsr @ sti\n" \
118 " bic %0, %0, #64\n" \
127 unsigned long temp; \
128 __asm__ __volatile__( \
129 "mrs %0, cpsr @ cli\n" \
130 " orr %0, %0, #64\n" \
137 #define fiq_save_and_cli(flags) \
139 unsigned long temp; \
140 (void) (&temp == &flags); \
141 __asm__ __volatile__( \
142 "mrs %0, cpsr @ save_and_cli\n" \
143 " orr %1, %0, #192\n" \
145 : "=r" (flags), "=r" (temp) \
/*
 * Cortex-M (Thumb-2 / ARMv6-M) branch: interrupt masking is done through
 * the PRIMASK special register instead of CPSR, and the active exception
 * number is read from IPSR.  Handlers get no argument here; the vector
 * index is recovered via irq_arch_get_irqidx(), and IRQ_IRQIDX_OFFSET (16)
 * is the distance between exception number 0 and the first external IRQ
 * in the NVIC vector table.
 * NOTE(review): the 'cpsie i'/'cpsid i' (or msr primask) instruction lines
 * and the macro openers/closers are missing from this extract; fragments
 * kept verbatim below.
 */
150 #elif defined(__thumb2__) || defined (__ARM_ARCH_6M__)
151 /* ARM Cortex-M3 architecture */
153 /* The interrupts are not delivered with argument,
154 it is retrieved independent way - irq_arch_get_irqidx */
155 #undef WITH_IRQ_HANDLER_ARGS
157 /* Offset between first interrupt source and exception table base */
158 #define IRQ_IRQIDX_OFFSET 16
162 __asm__ __volatile__( \
164 : : : "memory", "cc"); \
169 __asm__ __volatile__( \
171 : : : "memory", "cc"); \
174 #define save_and_cli(flags) \
176 unsigned long temp; \
177 (void) (&temp == &flags); \
178 __asm__ __volatile__( \
179 "mrs %0, primask @ save_and_cli\n" \
186 #define save_flags(flags) \
188 unsigned long temp; \
189 (void) (&temp == &flags); \
190 __asm__ __volatile__( \
191 "mrs %0, primask @ save_flags\n" \
197 #define restore_flags(flags) \
199 __asm__ __volatile__( \
200 "msr primask, %0 @ restore_flags\n" \
206 #define irq_arch_get_irqidx() \
208 unsigned long ipsr; \
209 __asm__ __volatile__( \
210 "mrs %0, ipsr @ get irqidx\n" \
215 #else /*defined(__thumb__)*/
217 #define WITH_IRQ_HANDLER_ARGS
218 /* Regular ARM architecture in THUMB mode */
/*
 * Thumb (16-bit) code cannot execute the mrs/msr CPSR sequences inline,
 * so interrupt control is routed through out-of-line ARM-state functions
 * (presumably defined in an ARM-mode .S/.c file - confirm) and the same
 * macro names are mapped onto them for source compatibility.
 */
/* Enable IRQs. */
220 void irq_fnc_sti(void);
221 #define sti irq_fnc_sti
/* Disable IRQs. */
222 void irq_fnc_cli(void);
223 #define cli irq_fnc_cli
/* Disable IRQs and return the previous flags value. */
224 unsigned long irq_fnc_save_and_cli(void);
225 #define save_and_cli(_flags) ((_flags)=irq_fnc_save_and_cli())
/* Read the current flags without changing them. */
226 unsigned long irq_fnc_save_flags(void);
227 #define save_flags(_flags) ((_flags)=irq_fnc_save_flags())
/* Restore flags previously obtained from save_flags/save_and_cli. */
228 void irq_fnc_restore_flags(unsigned long flags);
229 #define restore_flags irq_fnc_restore_flags
231 #endif /*defined(__thumb__)*/
/* CPU-specific routine that makes [start, end) coherent between the data
 * and instruction caches; defined elsewhere in the port. */
void __cpu_coherent_range(unsigned long start, unsigned long end);

/*
 * flush_icache_range - ensure instructions written into [start, end) are
 * visible to instruction fetch.  Thin wrapper over __cpu_coherent_range().
 * (Reconstructed: the function's brace lines were lost in this extract;
 * the single call statement is the complete visible body.)
 */
static inline void flush_icache_range(unsigned long start, unsigned long end)
{
	__cpu_coherent_range(start, end);
}
240 /* atomic access routines */
242 //typedef unsigned long atomic_t;
/*
 * NOTE(review): the bodies of the four helpers below are almost entirely
 * missing from this extract - only the signatures and the trailing
 * restore_flags(flags) calls survive.  From that, each presumably follows
 * the pattern: save_and_cli(flags); modify *addr; restore_flags(flags);
 * i.e. atomicity is achieved by masking interrupts, not by exclusive
 * load/store.  Confirm against the full source before relying on this.
 */
244 static inline void atomic_clear_mask(unsigned long mask, volatile unsigned long *addr)
250 restore_flags(flags);
253 static inline void atomic_set_mask(unsigned long mask, volatile unsigned long *addr)
259 restore_flags(flags);
262 static inline void set_bit(int nr, volatile unsigned long *addr)
268 restore_flags(flags);
271 static inline void clear_bit(int nr, volatile unsigned long *addr)
277 restore_flags(flags);
/*
 * test_bit - non-atomically test bit 'nr' of the word at *addr.
 * Returns 1 if the bit is set, 0 otherwise.
 * Fix: the original used (1<<nr), a signed-int shift - undefined behavior
 * for nr >= 31 and unable to address bits >= 32 where long is 64-bit.
 * 1UL matches the unsigned long word being tested.
 * (Brace lines reconstructed; they were lost in this extract.)
 */
static inline int test_bit(int nr, volatile unsigned long *addr)
{
	return ((*addr) & (1UL << nr)) ? 1 : 0;
}
/*
 * NOTE(review): body largely missing from this extract - only the
 * signature and final restore_flags() survive.  Presumably it saves the
 * old bit value, sets the bit under masked interrupts, and returns the
 * old value - confirm against the full source.
 */
285 static inline int test_and_set_bit(int nr, volatile unsigned long *addr)
294 restore_flags(flags);
298 #if defined(__thumb2__) || defined (__ARM_ARCH_6M__)
/* Cortex-M: a real hardware data memory barrier is available. */
302 #define __memory_barrier() \
303 __asm__ __volatile__("dmb": : : "memory")
305 #else /* old plain ARM architecture */
/* Pre-v6 ARM has no DMB; a compiler-only barrier (empty asm with a
 * "memory" clobber) is the best that can be done here - sufficient on a
 * single core with interrupt masking.
 * NOTE(review): the matching #endif (original line ~310) is missing from
 * this extract. */
307 #define __memory_barrier() \
308 __asm__ __volatile__("": : : "memory")
312 /*masked fields macros*/
/* (mask)&~((mask)<<1) isolates the lowest set bit of 'mask', i.e. the
 * weight of the field's least-significant bit.  Multiplying by it shifts
 * 'val' into the field position (__val2mfld) and dividing shifts the
 * extracted field back down (__mfld2val), with no explicit shift count. */
314 #define __val2mfld(mask,val) (((mask)&~((mask)<<1))*(val)&(mask))
315 #define __mfld2val(mask,val) (((val)&(mask))/((mask)&~((mask)<<1)))
/*
 * Memory-mapped I/O byte accessors: 'port' is used directly as a bus
 * address (cast to a volatile byte pointer), not translated through an
 * x86-style I/O space.
 * NOTE(review): the argument order here is outb(port, val) - the reverse
 * of the traditional outb(value, port) - confirm callers use this order.
 * (Closing braces reconstructed; they were lost in this extract; the
 * access statements are byte-identical to the original.)
 */
static inline void outb(unsigned int port, int val) {
	*(volatile unsigned char *)(port)=val;
}

static inline unsigned char inb(unsigned int port) {
	return *(volatile unsigned char *)(port);
}
/* Bracketing define tells <irq_generic.h> it is being pulled in from this
 * CPU header (presumably so it can skip or adapt parts - confirm in
 * irq_generic.h); it is undefined again immediately after. */
325 #define _WITHIN_CPU_DEF_H
326 #include <irq_generic.h>
327 #undef _WITHIN_CPU_DEF_H
/* Global IRQ dispatch tables, defined elsewhere: per-IRQ context pointers,
 * per-IRQ handler pointers, and the number of entries in both. */
329 extern void **irq_context_table;
330 extern irq_handler_t **irq_handler_table;
331 extern unsigned int irq_table_size;
333 /* Arithmetic functions */
/*
 * Saturating signed long add/subtract (sat_add_slsl / sat_sub_slsl),
 * selected per architecture:
 *  - ARMv5E DSP: single qadd/qsub instructions saturate in hardware.
 *  - Plain ARM: adds/subs sets V on overflow; the conditional
 *    eorvs/sbcvs pair then rebuilds the saturated value (0x7FFFFFFF or
 *    0x80000000) from the operand's sign.
 *  - Thumb-2/v6-M: same trick, but the 0x80000000 constant is passed in a
 *    register (operand %3).  NOTE(review): conditional execution in
 *    Thumb-2 normally needs IT blocks before the vs-suffixed
 *    instructions; the relevant lines may be among those missing here.
 * NOTE(review): the opening '#if' of the v5E branch, the asm output
 * operand lines and the macro closers are missing from this extract; the
 * final #endif comment suggests it closes the _ARM_CPU_DEF_H include
 * guard opened before this chunk.  Fragments kept verbatim.
 */
335 /* ARM v5E architecture - DSP extension */
337 #define sat_add_slsl(__x,__y) \
338 __asm__ (" qadd %0,%0,%2\n" \
340 : "0" ((long)__x), "r" ((long)__y) : "cc"); \
342 #define sat_sub_slsl(__x,__y) \
343 __asm__ (" qsub %0,%0,%2\n" \
345 : "0" ((long)__x), "r" ((long)__y) : "cc"); \
347 #elif !defined(__thumb__)
348 /* Regular 32-bit ARM architecture */
350 #define sat_add_slsl(__x,__y) \
351 __asm__ (" adds %0,%2\n" \
352 " eorvs %0,%2,#0x80000000\n" \
353 " sbcvs %0,%0,%2\n" \
355 : "0" ((long)__x), "r" ((long)__y) : "cc"); \
357 #define sat_sub_slsl(__x,__y) \
358 __asm__ (" subs %0,%2\n" \
359 " eorvs %0,%2,#0x80000000\n" \
360 " sbcvs %0,%0,%2\n" \
362 : "0" ((long)__x), "r" ((long)__y) : "cc"); \
364 #elif defined(__thumb2__) || defined (__ARM_ARCH_6M__)
366 #define sat_add_slsl(__x,__y) \
367 __asm__ (" adds %0,%2\n" \
369 " eorsvs %0,%3,%2\n" \
370 " sbcsvs %0,%0,%2\n" \
372 : "0" ((long)__x), "r" ((long)__y), "r" (0x80000000): "cc"); \
374 #define sat_sub_slsl(__x,__y) \
375 __asm__ (" subs %0,%2\n" \
377 " eorsvs %0,%3,%2\n" \
378 " sbcsvs %0,%0,%2\n" \
380 : "0" ((long)__x), "r" ((long)__y), "r" (0x80000000) : "cc"); \
384 #endif /* _ARM_CPU_DEF_H */