@@ -64,7 +64,7 @@
 
 /* Macro for generating the ***_bits() functions */
 #define DEFINE_BITOP(fn, op, prefix)	\
-static __inline__ void fn(unsigned long mask,	\
+static inline void fn(unsigned long mask,	\
 		volatile unsigned long *_p)	\
 {	\
 	unsigned long old;	\
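
For orientation: DEFINE_BITOP(fn, op, prefix) generates the mask-based atomic primitives (set_bits(), clear_bits(), change_bits(), ...), each an atomic read-modify-write of a single long implemented as a larx/stcx. retry loop in inline assembly. A rough portable stand-in for what DEFINE_BITOP(set_bits, or, "") produces, sketched with GCC's __atomic builtins rather than the header's assembly (function name hypothetical):

static inline void set_bits_sketch(unsigned long mask,
				   volatile unsigned long *p)
{
	/*
	 * Atomically: *p |= mask, with no ordering guarantees
	 * (barriers come from the 'prefix' argument in the real macro).
	 */
	__atomic_fetch_or(p, mask, __ATOMIC_RELAXED);
}
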
@@ -86,30 +86,30 @@ DEFINE_BITOP(clear_bits, andc, "")
 DEFINE_BITOP(clear_bits_unlock, andc, PPC_RELEASE_BARRIER)
 DEFINE_BITOP(change_bits, xor, "")
 
-static __inline__ void set_bit(int nr, volatile unsigned long *addr)
+static inline void arch_set_bit(int nr, volatile unsigned long *addr)
 {
 	set_bits(BIT_MASK(nr), addr + BIT_WORD(nr));
 }
 
-static __inline__ void clear_bit(int nr, volatile unsigned long *addr)
+static inline void arch_clear_bit(int nr, volatile unsigned long *addr)
 {
 	clear_bits(BIT_MASK(nr), addr + BIT_WORD(nr));
 }
 
-static __inline__ void clear_bit_unlock(int nr, volatile unsigned long *addr)
+static inline void arch_clear_bit_unlock(int nr, volatile unsigned long *addr)
 {
 	clear_bits_unlock(BIT_MASK(nr), addr + BIT_WORD(nr));
 }
 
-static __inline__ void change_bit(int nr, volatile unsigned long *addr)
+static inline void arch_change_bit(int nr, volatile unsigned long *addr)
 {
 	change_bits(BIT_MASK(nr), addr + BIT_WORD(nr));
 }
 
 /* Like DEFINE_BITOP(), with changes to the arguments to 'op' and the output
  * operands. */
 #define DEFINE_TESTOP(fn, op, prefix, postfix, eh)	\
-static __inline__ unsigned long fn(			\
+static inline unsigned long fn(			\
 		unsigned long mask,			\
 		volatile unsigned long *_p)		\
 {	\
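
The arch_ prefix is the heart of this change: the unprefixed names (set_bit() and friends) will now come from the generic instrumented wrappers included at the bottom of the file, which add KASAN checks before calling these arch_ implementations. Each helper reduces a flat bit number to a word offset plus an in-word mask via BIT_WORD()/BIT_MASK() from include/linux/bits.h. A minimal self-check of that arithmetic (assuming a 64-bit target; macro definitions copied from the kernel):

#include <assert.h>

#define BITS_PER_LONG	(8 * sizeof(unsigned long))
#define BIT_WORD(nr)	((nr) / BITS_PER_LONG)
#define BIT_MASK(nr)	(1UL << ((nr) % BITS_PER_LONG))

int main(void)
{
	/* Bit 70 lives in word 1, as bit 6 of that word. */
	assert(BIT_WORD(70) == 1);
	assert(BIT_MASK(70) == 1UL << 6);
	return 0;
}
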
@@ -138,34 +138,34 @@ DEFINE_TESTOP(test_and_clear_bits, andc, PPC_ATOMIC_ENTRY_BARRIER,
 DEFINE_TESTOP(test_and_change_bits, xor, PPC_ATOMIC_ENTRY_BARRIER,
 	      PPC_ATOMIC_EXIT_BARRIER, 0)
 
-static __inline__ int test_and_set_bit(unsigned long nr,
-				       volatile unsigned long *addr)
+static inline int arch_test_and_set_bit(unsigned long nr,
+					volatile unsigned long *addr)
 {
 	return test_and_set_bits(BIT_MASK(nr), addr + BIT_WORD(nr)) != 0;
 }
 
-static __inline__ int test_and_set_bit_lock(unsigned long nr,
-					    volatile unsigned long *addr)
+static inline int arch_test_and_set_bit_lock(unsigned long nr,
+					     volatile unsigned long *addr)
 {
 	return test_and_set_bits_lock(BIT_MASK(nr),
 				addr + BIT_WORD(nr)) != 0;
 }
 
-static __inline__ int test_and_clear_bit(unsigned long nr,
-					 volatile unsigned long *addr)
+static inline int arch_test_and_clear_bit(unsigned long nr,
+					  volatile unsigned long *addr)
 {
 	return test_and_clear_bits(BIT_MASK(nr), addr + BIT_WORD(nr)) != 0;
 }
 
-static __inline__ int test_and_change_bit(unsigned long nr,
-					  volatile unsigned long *addr)
+static inline int arch_test_and_change_bit(unsigned long nr,
+					   volatile unsigned long *addr)
 {
 	return test_and_change_bits(BIT_MASK(nr), addr + BIT_WORD(nr)) != 0;
 }
 
 #ifdef CONFIG_PPC64
-static __inline__ unsigned long clear_bit_unlock_return_word(int nr,
-						volatile unsigned long *addr)
+static inline unsigned long
+clear_bit_unlock_return_word(int nr, volatile unsigned long *addr)
 {
 	unsigned long old, t;
 	unsigned long *p = (unsigned long *)addr + BIT_WORD(nr);
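
Each test_and_* helper atomically flips the bit and reports its previous value; the != 0 collapses the masked word to a 0/1 int. That return value is what makes these usable as one-shot guards or try-locks. A self-contained analogue of the calling pattern (hypothetical names, GCC builtins standing in for the kernel primitives):

#include <stdbool.h>

static unsigned long init_done;	/* bit 0 guards one-time setup */

static bool try_claim_init(void)
{
	/*
	 * Atomically set bit 0 and fetch the old word; a zero old bit
	 * means this caller won the race and must do the setup.
	 */
	unsigned long old = __atomic_fetch_or(&init_done, 1UL,
					      __ATOMIC_ACQUIRE);
	return (old & 1UL) == 0;
}
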
@@ -185,15 +185,18 @@ static __inline__ unsigned long clear_bit_unlock_return_word(int nr,
 	return old;
 }
 
-/* This is a special function for mm/filemap.c */
-#define clear_bit_unlock_is_negative_byte(nr, addr)			\
-	(clear_bit_unlock_return_word(nr, addr) & BIT_MASK(PG_waiters))
+/*
+ * This is a special function for mm/filemap.c
+ * Bit 7 corresponds to PG_waiters.
+ */
+#define arch_clear_bit_unlock_is_negative_byte(nr, addr)		\
+	(clear_bit_unlock_return_word(nr, addr) & BIT_MASK(7))
 
 #endif /* CONFIG_PPC64 */
 
 #include <asm-generic/bitops/non-atomic.h>
 
-static __inline__ void __clear_bit_unlock(int nr, volatile unsigned long *addr)
+static inline void arch___clear_bit_unlock(int nr, volatile unsigned long *addr)
 {
 	__asm__ __volatile__(PPC_RELEASE_BARRIER "" ::: "memory");
 	__clear_bit(nr, addr);
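
The negative-byte macro exists so mm/filemap.c can unlock a page (clear PG_locked) and, from the same atomic operation, learn whether PG_waiters (bit 7, the sign bit of the flags byte) is set and waiters need waking: one larx/stcx. sequence instead of two atomics. Note the literal 7 replacing BIT_MASK(PG_waiters); presumably the page-flag definitions are not usable in this low-level header, which is why the new comment spells out the correspondence. A self-contained analogue of the combined operation (not the kernel's implementation):

#include <stdbool.h>

static bool unlock_and_check_waiters(unsigned long *word,
				     unsigned long locked_mask)
{
	/*
	 * Clear the lock bit with release semantics and test the
	 * "waiters" bit (bit 7) in the value we atomically read back.
	 */
	unsigned long old = __atomic_fetch_and(word, ~locked_mask,
					       __ATOMIC_RELEASE);
	return (old & (1UL << 7)) != 0;
}
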
@@ -215,14 +218,14 @@ static __inline__ void __clear_bit_unlock(int nr, volatile unsigned long *addr)
  * fls: find last (most-significant) bit set.
  * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
  */
-static __inline__ int fls(unsigned int x)
+static inline int fls(unsigned int x)
 {
 	return 32 - __builtin_clz(x);
 }
 
 #include <asm-generic/bitops/builtin-__fls.h>
 
-static __inline__ int fls64(__u64 x)
+static inline int fls64(__u64 x)
 {
 	return 64 - __builtin_clzll(x);
 }
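
One caveat worth noting: __builtin_clz(0) is undefined behaviour in C, so the fls(0) = 0 promise in the comment relies on the PowerPC lowering (cntlzw returns 32 for zero input) rather than on the C contract. A portable sketch with an explicit zero check, exercising the documented cases:

#include <assert.h>

static inline int fls_portable(unsigned int x)
{
	/* Guard zero explicitly; the builtin is undefined for 0. */
	return x ? 32 - __builtin_clz(x) : 0;
}

int main(void)
{
	assert(fls_portable(0) == 0);
	assert(fls_portable(1) == 1);
	assert(fls_portable(0x80000000u) == 32);
	return 0;
}
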
@@ -239,6 +242,10 @@ unsigned long __arch_hweight64(__u64 w);
 
 #include <asm-generic/bitops/find.h>
 
+/* wrappers that deal with KASAN instrumentation */
+#include <asm-generic/bitops/instrumented-atomic.h>
+#include <asm-generic/bitops/instrumented-lock.h>
+
 /* Little-endian versions */
 #include <asm-generic/bitops/le.h>
 
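
These two includes are the payoff for the arch_ renames above: the generic headers define the public, unprefixed bitops as thin wrappers that report the access to KASAN before deferring to the arch_ implementations. Schematically (simplified from asm-generic/bitops/instrumented-atomic.h; the exact instrumentation hook has varied across kernel versions):

static inline void set_bit(long nr, volatile unsigned long *addr)
{
	/*
	 * Report the impending write on the affected word to KASAN,
	 * then perform the real atomic operation.
	 */
	kasan_check_write(addr + BIT_WORD(nr), sizeof(long));
	arch_set_bit(nr, addr);
}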