/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1994, 95, 96, 97, 98, 99, 2003 by Ralf Baechle
 * Copyright (C) 1996 by Paul M. Antoine
 * Copyright (C) 1999 Silicon Graphics
 * Copyright (C) 2000 MIPS Technologies, Inc.
 */
#include <asm/irqflags.h>
#include <asm/hazards.h>
#include <linux/compiler.h>
#include <linux/preempt.h>
#include <linux/export.h>
#include <linux/stringify.h>

#ifndef CONFIG_CPU_MIPSR2
/*
 * For cli() we have to insert nops to make sure that the new value
 * has actually arrived in the status register before the end of this
 * macro.
 * R4000/R4400 need three nops, the R4600 two nops and the R10000 needs
 * no nops at all.
 */
/*
 * For TX49, operating only IE bit is not enough.
 *
 * If mfc0 $12 follows store and the mfc0 is last instruction of a
 * page and fetching the next instruction causes TLB miss, the result
 * of the mfc0 might wrongly contain EXL bit.
 *
 * ERT-TX49H2-027, ERT-TX49H3-012, ERT-TX49HL3-006, ERT-TX49H4-008
 *
 * Workaround: mask EXL bit of the result or place a nop before mfc0.
 */
02b849f7 RB |
38 | notrace void arch_local_irq_disable(void) |
39 | { | |
40 | preempt_disable(); | |
41 | ||
42 | __asm__ __volatile__( | |
e97c5b60 JQ |
43 | " .set push \n" |
44 | " .set noat \n" | |
b633648c | 45 | #if defined(CONFIG_CPU_MIPSR2) |
e97c5b60 JQ |
46 | /* see irqflags.h for inline function */ |
47 | #else | |
48 | " mfc0 $1,$12 \n" | |
49 | " ori $1,0x1f \n" | |
50 | " xori $1,0x1f \n" | |
51 | " .set noreorder \n" | |
52 | " mtc0 $1,$12 \n" | |
53 | #endif | |
02b849f7 | 54 | " " __stringify(__irq_disable_hazard) " \n" |
e97c5b60 | 55 | " .set pop \n" |
02b849f7 RB |
56 | : /* no outputs */ |
57 | : /* no inputs */ | |
58 | : "memory"); | |
e97c5b60 | 59 | |
e97c5b60 JQ |
60 | preempt_enable(); |
61 | } | |
62 | EXPORT_SYMBOL(arch_local_irq_disable); | |
63 | ||
64 | ||
02b849f7 RB |
65 | notrace unsigned long arch_local_irq_save(void) |
66 | { | |
67 | unsigned long flags; | |
68 | ||
69 | preempt_disable(); | |
70 | ||
71 | __asm__ __volatile__( | |
e97c5b60 JQ |
72 | " .set push \n" |
73 | " .set reorder \n" | |
74 | " .set noat \n" | |
b633648c | 75 | #if defined(CONFIG_CPU_MIPSR2) |
e97c5b60 JQ |
76 | /* see irqflags.h for inline function */ |
77 | #else | |
02b849f7 RB |
78 | " mfc0 %[flags], $12 \n" |
79 | " ori $1, %[flags], 0x1f \n" | |
e97c5b60 JQ |
80 | " xori $1, 0x1f \n" |
81 | " .set noreorder \n" | |
82 | " mtc0 $1, $12 \n" | |
83 | #endif | |
02b849f7 | 84 | " " __stringify(__irq_disable_hazard) " \n" |
e97c5b60 | 85 | " .set pop \n" |
02b849f7 RB |
86 | : [flags] "=r" (flags) |
87 | : /* no inputs */ | |
88 | : "memory"); | |
e97c5b60 | 89 | |
e97c5b60 | 90 | preempt_enable(); |
02b849f7 | 91 | |
e97c5b60 JQ |
92 | return flags; |
93 | } | |
94 | EXPORT_SYMBOL(arch_local_irq_save); | |
95 | ||
02b849f7 RB |
96 | notrace void arch_local_irq_restore(unsigned long flags) |
97 | { | |
98 | unsigned long __tmp1; | |
99 | ||
02b849f7 | 100 | preempt_disable(); |
e97c5b60 | 101 | |
02b849f7 | 102 | __asm__ __volatile__( |
e97c5b60 JQ |
103 | " .set push \n" |
104 | " .set noreorder \n" | |
105 | " .set noat \n" | |
b633648c | 106 | #if defined(CONFIG_CPU_MIPSR2) && defined(CONFIG_IRQ_CPU) |
e97c5b60 JQ |
107 | /* see irqflags.h for inline function */ |
108 | #elif defined(CONFIG_CPU_MIPSR2) | |
109 | /* see irqflags.h for inline function */ | |
110 | #else | |
111 | " mfc0 $1, $12 \n" | |
02b849f7 | 112 | " andi %[flags], 1 \n" |
e97c5b60 JQ |
113 | " ori $1, 0x1f \n" |
114 | " xori $1, 0x1f \n" | |
02b849f7 RB |
115 | " or %[flags], $1 \n" |
116 | " mtc0 %[flags], $12 \n" | |
e97c5b60 | 117 | #endif |
02b849f7 | 118 | " " __stringify(__irq_disable_hazard) " \n" |
e97c5b60 | 119 | " .set pop \n" |
02b849f7 RB |
120 | : [flags] "=r" (__tmp1) |
121 | : "0" (flags) | |
122 | : "memory"); | |
e97c5b60 | 123 | |
e97c5b60 JQ |
124 | preempt_enable(); |
125 | } | |
126 | EXPORT_SYMBOL(arch_local_irq_restore); | |
127 | ||
128 | ||
f93a1a00 | 129 | notrace void __arch_local_irq_restore(unsigned long flags) |
e97c5b60 JQ |
130 | { |
131 | unsigned long __tmp1; | |
132 | ||
133 | preempt_disable(); | |
02b849f7 | 134 | |
e97c5b60 | 135 | __asm__ __volatile__( |
02b849f7 RB |
136 | " .set push \n" |
137 | " .set noreorder \n" | |
138 | " .set noat \n" | |
b633648c | 139 | #if defined(CONFIG_CPU_MIPSR2) && defined(CONFIG_IRQ_CPU) |
02b849f7 RB |
140 | /* see irqflags.h for inline function */ |
141 | #elif defined(CONFIG_CPU_MIPSR2) | |
142 | /* see irqflags.h for inline function */ | |
143 | #else | |
144 | " mfc0 $1, $12 \n" | |
145 | " andi %[flags], 1 \n" | |
146 | " ori $1, 0x1f \n" | |
147 | " xori $1, 0x1f \n" | |
148 | " or %[flags], $1 \n" | |
149 | " mtc0 %[flags], $12 \n" | |
150 | #endif | |
151 | " " __stringify(__irq_disable_hazard) " \n" | |
152 | " .set pop \n" | |
153 | : [flags] "=r" (__tmp1) | |
154 | : "0" (flags) | |
155 | : "memory"); | |
156 | ||
e97c5b60 JQ |
157 | preempt_enable(); |
158 | } | |
159 | EXPORT_SYMBOL(__arch_local_irq_restore); | |

#endif /* !CONFIG_CPU_MIPSR2 */