Commit | Line | Data |
---|---|---|
b2441318 | 1 | /* SPDX-License-Identifier: GPL-2.0 */ |
b4816afa DH |
2 | /* |
3 | * Generic UP xchg and cmpxchg using interrupt disablement. Does not | |
4 | * support SMP. | |
5 | */ | |
6 | ||
068fbad2 MD |
7 | #ifndef __ASM_GENERIC_CMPXCHG_H |
8 | #define __ASM_GENERIC_CMPXCHG_H | |
9 | ||
068fbad2 MD |
10 | #ifdef CONFIG_SMP |
11 | #error "Cannot use generic cmpxchg on SMP" | |
12 | #endif | |
13 | ||
80da6a4f | 14 | #include <linux/types.h> |
b4816afa DH |
15 | #include <linux/irqflags.h> |
16 | ||
b4816afa DH |
17 | /* |
18 | * This function doesn't exist, so you'll get a linker error if | |
19 | * something tries to do an invalidly-sized xchg(). | |
20 | */ | |
82b993e8 | 21 | extern void __generic_xchg_called_with_bad_pointer(void); |
b4816afa DH |
22 | |
/*
 * __generic_xchg - atomically exchange a value with memory (UP only)
 * @x:    new value to store (only the low @size bytes are used)
 * @ptr:  location to exchange with
 * @size: width of the access in bytes (1, 2, 4, or 8 with CONFIG_64BIT)
 *
 * Returns the previous value at @ptr, zero-extended to unsigned long.
 *
 * Atomicity is provided by disabling local interrupts around the
 * read-modify-write; per the file header this is only valid on
 * uniprocessor (the header #errors out under CONFIG_SMP).  An
 * architecture may supply its own __xchg_uN primitives, which take
 * precedence over the irq-disable fallback.
 */
static inline
unsigned long __generic_xchg(unsigned long x, volatile void *ptr, int size)
{
	unsigned long ret, flags;

	switch (size) {
	case 1:
#ifdef __xchg_u8
		return __xchg_u8(x, ptr);
#else
		local_irq_save(flags);
		ret = *(volatile u8 *)ptr;
		/* mask to the access width so the store is well-defined */
		*(volatile u8 *)ptr = (x & 0xffu);
		local_irq_restore(flags);
		return ret;
#endif /* __xchg_u8 */

	case 2:
#ifdef __xchg_u16
		return __xchg_u16(x, ptr);
#else
		local_irq_save(flags);
		ret = *(volatile u16 *)ptr;
		*(volatile u16 *)ptr = (x & 0xffffu);
		local_irq_restore(flags);
		return ret;
#endif /* __xchg_u16 */

	case 4:
#ifdef __xchg_u32
		return __xchg_u32(x, ptr);
#else
		local_irq_save(flags);
		ret = *(volatile u32 *)ptr;
		*(volatile u32 *)ptr = (x & 0xffffffffu);
		local_irq_restore(flags);
		return ret;
#endif /* __xchg_u32 */

#ifdef CONFIG_64BIT
	case 8:
#ifdef __xchg_u64
		return __xchg_u64(x, ptr);
#else
		local_irq_save(flags);
		ret = *(volatile u64 *)ptr;
		/* full width: no masking needed */
		*(volatile u64 *)ptr = x;
		local_irq_restore(flags);
		return ret;
#endif /* __xchg_u64 */
#endif /* CONFIG_64BIT */

	default:
		/* deliberately undefined symbol -> link error on bad size */
		__generic_xchg_called_with_bad_pointer();
		return x;
	}
}
80 | ||
82b993e8 MR |
/*
 * generic_xchg - type-preserving front end for __generic_xchg().
 * Derives the access width from sizeof(*(ptr)) and casts the result
 * back to the pointee type via a GNU statement expression.
 */
#define generic_xchg(ptr, x) ({ \
	((__typeof__(*(ptr))) \
		__generic_xchg((unsigned long)(x), (ptr), sizeof(*(ptr)))); \
})
b4816afa | 85 | |
068fbad2 MD |
86 | /* |
87 | * Atomic compare and exchange. | |
068fbad2 | 88 | */ |
b4816afa DH |
89 | #include <asm-generic/cmpxchg-local.h> |
90 | ||
82b993e8 MR |
/*
 * generic_cmpxchg_local - compare-and-exchange via the cmpxchg-local
 * helper (pulled in from <asm-generic/cmpxchg-local.h>), with the
 * width taken from sizeof(*(ptr)) and the result cast back to the
 * pointee type.
 */
#define generic_cmpxchg_local(ptr, o, n) ({ \
	((__typeof__(*(ptr)))__generic_cmpxchg_local((ptr), (unsigned long)(o), \
			(unsigned long)(n), sizeof(*(ptr)))); \
})
82b993e8 MR |
95 | |
/* 64-bit variant: forwards directly to the cmpxchg-local helper. */
#define generic_cmpxchg64_local(ptr, o, n) \
	__generic_cmpxchg64_local((ptr), (o), (n))

82b993e8 MR |
100 | #ifndef arch_xchg |
101 | #define arch_xchg generic_xchg | |
102 | #endif | |
103 | ||
104 | #ifndef arch_cmpxchg_local | |
105 | #define arch_cmpxchg_local generic_cmpxchg_local | |
106 | #endif | |
107 | ||
108 | #ifndef arch_cmpxchg64_local | |
109 | #define arch_cmpxchg64_local generic_cmpxchg64_local | |
110 | #endif | |
111 | ||
112 | #define arch_cmpxchg arch_cmpxchg_local | |
113 | #define arch_cmpxchg64 arch_cmpxchg64_local | |
114 | ||
b4816afa | 115 | #endif /* __ASM_GENERIC_CMPXCHG_H */ |