Commit | Line | Data |
---|---|---|
f405df5d PZ |
1 | #ifndef _LINUX_REFCOUNT_H |
2 | #define _LINUX_REFCOUNT_H | |
3 | ||
f405df5d | 4 | #include <linux/atomic.h> |
f405df5d PZ |
5 | #include <linux/mutex.h> |
6 | #include <linux/spinlock.h> | |
318b1ded | 7 | #include <linux/kernel.h> |
f405df5d | 8 | |
bd174169 DW |
9 | /** |
10 | * refcount_t - variant of atomic_t specialized for reference counts | |
11 | * @refs: atomic_t counter field | |
12 | * | |
13 | * The counter saturates at UINT_MAX and will not move once | |
14 | * there. This avoids wrapping the counter and causing 'spurious' | |
15 | * use-after-free bugs. | |
16 | */ | |
typedef struct refcount_struct {
	atomic_t refs;
} refcount_t;

/* Static initializer for a refcount_t with initial value @n, e.g. REFCOUNT_INIT(1). */
#define REFCOUNT_INIT(n)	{ .refs = ATOMIC_INIT(n), }
22 | ||
bd174169 DW |
23 | /** |
24 | * refcount_set - set a refcount's value | |
25 | * @r: the refcount | |
26 | * @n: value to which the refcount will be set | |
27 | */ | |
static inline void refcount_set(refcount_t *r, unsigned int n)
{
	/* Unconditional store; NOTE(review): presumably only safe when the
	 * caller has exclusive access (e.g. object initialization) — confirm. */
	atomic_set(&r->refs, n);
}
32 | ||
bd174169 DW |
33 | /** |
34 | * refcount_read - get a refcount's value | |
35 | * @r: the refcount | |
36 | * | |
37 | * Return: the refcount's value | |
38 | */ | |
static inline unsigned int refcount_read(const refcount_t *r)
{
	/* Plain snapshot of the counter; the value may change immediately
	 * after it is read if other references exist. */
	return atomic_read(&r->refs);
}
43 | ||
fd25d19f | 44 | #ifdef CONFIG_REFCOUNT_FULL |
29dee3c0 PZ |
45 | extern __must_check bool refcount_add_not_zero(unsigned int i, refcount_t *r); |
46 | extern void refcount_add(unsigned int i, refcount_t *r); | |
f405df5d | 47 | |
29dee3c0 PZ |
48 | extern __must_check bool refcount_inc_not_zero(refcount_t *r); |
49 | extern void refcount_inc(refcount_t *r); | |
f405df5d | 50 | |
29dee3c0 | 51 | extern __must_check bool refcount_sub_and_test(unsigned int i, refcount_t *r); |
f405df5d | 52 | |
29dee3c0 PZ |
53 | extern __must_check bool refcount_dec_and_test(refcount_t *r); |
54 | extern void refcount_dec(refcount_t *r); | |
fd25d19f | 55 | #else |
7a46ec0e KC |
56 | # ifdef CONFIG_ARCH_HAS_REFCOUNT |
57 | # include <asm/refcount.h> | |
58 | # else | |
/**
 * refcount_add_not_zero - add a value to a refcount unless it is 0
 * @i: the value to add to the refcount
 * @r: the refcount
 *
 * Return: false if @r was 0 and nothing was added, true otherwise.
 *
 * NOTE(review): this is the plain-atomic fallback (no CONFIG_REFCOUNT_FULL,
 * no arch implementation); unlike the extern variants above it presumably
 * provides no saturation/overflow protection — confirm against lib/refcount.c.
 */
static inline __must_check bool refcount_add_not_zero(unsigned int i, refcount_t *r)
{
	return atomic_add_unless(&r->refs, i, 0);
}
63 | ||
/**
 * refcount_add - add a value to a refcount
 * @i: the value to add to the refcount
 * @r: the refcount
 *
 * Unconditional add; the caller must already hold a reference so that
 * @r cannot be 0 here.
 */
static inline void refcount_add(unsigned int i, refcount_t *r)
{
	atomic_add(i, &r->refs);
}
68 | ||
/**
 * refcount_inc_not_zero - increment a refcount unless it is 0
 * @r: the refcount
 *
 * Return: true if the increment was performed, false if @r was 0
 * (i.e. the object is already being destroyed and must not be revived).
 */
static inline __must_check bool refcount_inc_not_zero(refcount_t *r)
{
	return atomic_add_unless(&r->refs, 1, 0);
}
73 | ||
/**
 * refcount_inc - increment a refcount
 * @r: the refcount
 *
 * Unconditional increment; the caller must already hold a reference,
 * otherwise refcount_inc_not_zero() should be used.
 */
static inline void refcount_inc(refcount_t *r)
{
	atomic_inc(&r->refs);
}
78 | ||
/**
 * refcount_sub_and_test - subtract from a refcount and test if it is 0
 * @i: amount to subtract from the refcount
 * @r: the refcount
 *
 * Return: true if the resulting refcount is 0 (caller must free the
 * object), false otherwise.
 */
static inline __must_check bool refcount_sub_and_test(unsigned int i, refcount_t *r)
{
	return atomic_sub_and_test(i, &r->refs);
}
83 | ||
/**
 * refcount_dec_and_test - decrement a refcount and test if it is 0
 * @r: the refcount
 *
 * Return: true if this was the last reference (resulting count is 0)
 * and the caller must free the object, false otherwise.
 */
static inline __must_check bool refcount_dec_and_test(refcount_t *r)
{
	return atomic_dec_and_test(&r->refs);
}
88 | ||
/**
 * refcount_dec - decrement a refcount
 * @r: the refcount
 *
 * Unconditional decrement for when the caller knows this is not the
 * last reference; NOTE(review): unlike the CONFIG_REFCOUNT_FULL variant,
 * this fallback presumably cannot detect a drop to 0 — confirm.
 */
static inline void refcount_dec(refcount_t *r)
{
	atomic_dec(&r->refs);
}
7a46ec0e | 93 | # endif /* !CONFIG_ARCH_HAS_REFCOUNT */ |
fd25d19f | 94 | #endif /* CONFIG_REFCOUNT_FULL */ |
f405df5d | 95 | |
29dee3c0 PZ |
96 | extern __must_check bool refcount_dec_if_one(refcount_t *r); |
97 | extern __must_check bool refcount_dec_not_one(refcount_t *r); | |
98 | extern __must_check bool refcount_dec_and_mutex_lock(refcount_t *r, struct mutex *lock); | |
99 | extern __must_check bool refcount_dec_and_lock(refcount_t *r, spinlock_t *lock); | |
f405df5d PZ |
100 | |
101 | #endif /* _LINUX_REFCOUNT_H */ |