#ifndef __ASM_CRIS_SYSTEM_H
#define __ASM_CRIS_SYSTEM_H

#include <asm/arch/system.h>

/* the switch_to macro calls resume, an asm function in entry.S which does
 * the actual task switching.
 */

extern struct task_struct *resume(struct task_struct *prev, struct task_struct *next, int);
#define switch_to(prev,next,last) last = resume(prev,next, \
					(int)&((struct task_struct *)0)->thread)
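/* The third argument passed to resume() is the byte offset of the thread
 * member inside struct task_struct, computed with the classic
 * null-pointer offsetof idiom, so the assembly code knows where the saved
 * thread state lives. */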

#define barrier() __asm__ __volatile__("": : :"memory")
#define mb() barrier()
#define rmb() mb()
#define wmb() mb()
#define read_barrier_depends() do { } while(0)
#define set_mb(var, value) do { var = value; mb(); } while (0)

#ifdef CONFIG_SMP
#define smp_mb() mb()
#define smp_rmb() rmb()
#define smp_wmb() wmb()
#define smp_read_barrier_depends() read_barrier_depends()
#else
#define smp_mb() barrier()
#define smp_rmb() barrier()
#define smp_wmb() barrier()
#define smp_read_barrier_depends() do { } while(0)
#endif
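/*
 * Illustrative pairing (a sketch; data, flag, and use() are hypothetical):
 * a producer publishing data behind a flag would do
 *
 *	data = 1;
 *	smp_wmb();
 *	flag = 1;
 *
 * and the consumer
 *
 *	if (flag) {
 *		smp_rmb();
 *		use(data);
 *	}
 *
 * On a non-SMP build all of these collapse to plain compiler barriers.
 */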

#define iret()

/*
 * disable hlt during certain critical i/o operations
 */
#define HAVE_DISABLE_HLT
void disable_hlt(void);
void enable_hlt(void);
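/*
 * Usage sketch (the i/o body is hypothetical): code that must keep the CPU
 * out of hlt brackets the critical region as
 *
 *	disable_hlt();
 *	... perform the timing-critical i/o ...
 *	enable_hlt();
 */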

static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int size)
{
	/* Since Etrax doesn't have any atomic xchg instructions, we need to
	 * disable irqs (if enabled) and do it with move.d's. */
	unsigned long flags, temp;
	local_irq_save(flags); /* save flags, including irq enable bit, and shut off irqs */
	switch (size) {
	case 1:
		*((unsigned char *)&temp) = x;
		x = *(unsigned char *)ptr;
		*(unsigned char *)ptr = *((unsigned char *)&temp);
		break;
	case 2:
		*((unsigned short *)&temp) = x;
		x = *(unsigned short *)ptr;
		*(unsigned short *)ptr = *((unsigned short *)&temp);
		break;
	case 4:
		temp = x;
		x = *(unsigned long *)ptr;
		*(unsigned long *)ptr = temp;
		break;
	}
	local_irq_restore(flags); /* restore irq enable bit */
	return x;
}
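/*
 * A minimal sketch of the type-generic wrapper callers would normally use,
 * assuming the usual kernel convention of dispatching on operand size:
 *
 *	#define xchg(ptr, x) \
 *		((__typeof__(*(ptr)))__xchg((unsigned long)(x), (void *)(ptr), \
 *					    sizeof(*(ptr))))
 *
 * e.g. xchg(&lock_byte, 1) stores 1 and returns the previous value, with
 * interrupts disabled around the swap.
 */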

#include <asm-generic/cmpxchg-local.h>

/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */
#define cmpxchg_local(ptr, o, n)					       \
	((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
			(unsigned long)(n), sizeof(*(ptr))))
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
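/*
 * Example (sketch; counter is a hypothetical unsigned long): cmpxchg_local()
 * is a compare-and-swap that is atomic only with respect to the current CPU:
 *
 *	unsigned long old = counter;
 *	if (cmpxchg_local(&counter, old, old + 1) == old)
 *		... the increment took effect ...
 */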

#ifndef CONFIG_SMP
#include <asm-generic/cmpxchg.h>
#endif

#define arch_align_stack(x) (x)

void default_idle(void);

#endif