/*
 * include/asm-v850/system.h -- Low-level interrupt/thread ops
 *
 * Copyright (C) 2001,02,03  NEC Electronics Corporation
 * Copyright (C) 2001,02,03  Miles Bader <miles@gnu.org>
 *
 * This file is subject to the terms and conditions of the GNU General
 * Public License.  See the file COPYING in the main directory of this
 * archive for more details.
 *
 * Written by Miles Bader <miles@gnu.org>
 */
13 | ||
14 | #ifndef __V850_SYSTEM_H__ | |
15 | #define __V850_SYSTEM_H__ | |
16 | ||
17 | #include <linux/linkage.h> | |
18 | #include <asm/ptrace.h> | |
19 | ||
20 | ||
/*
 * switch_to(prev,next,last) switches tasks from PREV to NEXT, first
 * checking that they are not the same task, in which case it does
 * nothing.  The value returned by switch_thread (a pointer identifying
 * the previously running thread) is stored into LAST.
 *
 * Every macro parameter is parenthesized at each use so the expansion
 * stays correct when callers pass non-trivial expressions (the original
 * used bare `prev`/`next`, e.g. `&prev->thread`, which mis-binds for an
 * argument like `a ? b : c`).
 */
struct thread_struct;
extern void *switch_thread (struct thread_struct *last,
			    struct thread_struct *next);
#define switch_to(prev,next,last)					\
  do {									\
	if ((prev) != (next)) {						\
		(last) = switch_thread (&(prev)->thread, &(next)->thread); \
	}								\
  } while (0)
34 | ||
35 | ||
/* Enable/disable interrupts.
 *
 * The "memory" clobber makes each of these a compiler barrier as well:
 * without it the compiler is free to move loads/stores across the
 * `ei`/`di` instruction, letting critical-section accesses leak outside
 * the interrupts-disabled region.
 */
#define local_irq_enable()	__asm__ __volatile__ ("ei" ::: "memory")
#define local_irq_disable()	__asm__ __volatile__ ("di" ::: "memory")

/* Copy the PSW system register into FLAGS. */
#define local_save_flags(flags) \
  __asm__ __volatile__ ("stsr %1, %0" : "=r" (flags) : "i" (SR_PSW))
/* Write FLAGS back into the PSW system register; also a compiler
 * barrier, since restoring may re-enable interrupts.
 */
#define local_restore_flags(flags) \
  __asm__ __volatile__ ("ldsr %0, %1" :: "r" (flags), "i" (SR_PSW) : "memory")
44 | ||
/* For spinlocks etc: save the current interrupt state into FLAGS and
 * disable interrupts.
 */
#define local_irq_save(flags) \
  do { local_save_flags (flags); local_irq_disable (); } while (0)
/* Restore the interrupt state previously saved by local_irq_save.
 *
 * Wrapped in do { } while (0): the original expansion carried a stray
 * trailing semicolon, which silently broke call sites of the form
 * `if (x) local_irq_restore(f); else ...` by terminating the if body.
 */
#define local_irq_restore(flags) \
  do { local_restore_flags (flags); } while (0)
50 | ||
51 | ||
/* Return nonzero iff interrupts are currently disabled.
 *
 * Tests bit 0x20 of the saved PSW -- presumably the v850 PSW "ID"
 * (interrupt-disable) bit; confirm against the CPU architecture manual.
 */
static inline int irqs_disabled (void)
{
	unsigned psw;
	local_save_flags (psw);
	return (psw & 0x20) ? 1 : 0;
}
58 | ||
59 | ||
/*
 * Force strict CPU ordering.
 * Not really required on v850...
 */
/* Single no-op instruction. */
#define nop() __asm__ __volatile__ ("nop")
/* Compiler-only barrier: an empty asm with a "memory" clobber stops the
   compiler from reordering memory accesses across it; no CPU barrier
   instruction is emitted. */
#define mb() __asm__ __volatile__ ("" ::: "memory")
#define rmb() mb ()
#define wmb() mb ()
/* No address-dependency barrier needed; expands to nothing. */
#define read_barrier_depends() ((void)0)
/* Assign VALUE to VAR with barrier semantics.  Implemented via xchg
   (defined below), which disables interrupts around the store, making
   the update atomic with respect to local interrupts. */
#define set_rmb(var, value) do { xchg (&var, value); } while (0)
#define set_mb(var, value) set_rmb (var, value)

/* The smp_* variants map straight to the plain barriers above. */
#define smp_mb() mb ()
#define smp_rmb() rmb ()
#define smp_wmb() wmb ()
#define smp_read_barrier_depends() read_barrier_depends()
76 | ||
/* Exchange *PTR with WITH, returning *PTR's previous value, cast back
 * to PTR's pointee type.  SIZE is inferred from the pointee.
 */
#define xchg(ptr, with) \
  ((__typeof__ (*(ptr)))__xchg ((unsigned long)(with), (ptr), sizeof (*(ptr))))

/* Atomically (with respect to local interrupts) exchange the SIZE-byte
 * object at PTR with WITH, returning the previous value.
 *
 * SIZE must be 1, 2, or 4.  Any other size now leaves *PTR untouched
 * and returns 0 -- the original had no default case and returned an
 * uninitialized TMP (undefined behavior) for unsupported sizes.
 */
static inline unsigned long __xchg (unsigned long with,
				    __volatile__ void *ptr, int size)
{
	unsigned long tmp = 0;
	unsigned long flags;

	local_irq_save (flags);

	switch (size) {
	case 1:
		tmp = *(unsigned char *)ptr;
		*(unsigned char *)ptr = with;
		break;
	case 2:
		tmp = *(unsigned short *)ptr;
		*(unsigned short *)ptr = with;
		break;
	case 4:
		tmp = *(unsigned long *)ptr;
		*(unsigned long *)ptr = with;
		break;
	default:
		/* Unsupported size; nothing exchanged, 0 returned. */
		break;
	}

	local_irq_restore (flags);

	return tmp;
}
106 | ||
/* No arch-specific stack randomization/alignment: return X unchanged. */
#define arch_align_stack(x) (x)
108 | ||
109 | #endif /* __V850_SYSTEM_H__ */ |