@@ -134,18 +134,38 @@ SWIFT_ENUM(CASType, closed)
parameterType CAtomics##opName(swiftType *_Nonnull atomic, parameterType pName, enum MemoryOrder order) \
{ return atomic_##op##_explicit(&(atomic->a), pName, order); }

+ #define CLANG_ATOMICS_WEAK_CAS(swiftType, parameterType) \
+ static __inline__ __attribute__((__always_inline__)) \
+ __attribute__((overloadable)) \
+ _Bool CAtomicsCompareAndExchangeWeak(swiftType *_Nonnull atomic, parameterType *_Nonnull current, parameterType future, \
+                                      enum MemoryOrder orderSwap, enum LoadMemoryOrder orderLoad) \
+ { \
+   assert((unsigned int)orderLoad <= (unsigned int)orderSwap); \
+   assert(orderSwap == __ATOMIC_RELEASE ? orderLoad == __ATOMIC_RELAXED : true); \
+   return atomic_compare_exchange_weak_explicit(&(atomic->a), current, future, orderSwap, orderLoad); \
+ }
+
+ #define CLANG_ATOMICS_STRONG_CAS(swiftType, parameterType) \
+ static __inline__ __attribute__((__always_inline__)) \
+ __attribute__((overloadable)) \
+ _Bool CAtomicsCompareAndExchangeStrong(swiftType *_Nonnull atomic, parameterType *_Nonnull current, parameterType future, \
+                                        enum MemoryOrder orderSwap, enum LoadMemoryOrder orderLoad) \
+ { \
+   assert((unsigned int)orderLoad <= (unsigned int)orderSwap); \
+   assert(orderSwap == __ATOMIC_RELEASE ? orderLoad == __ATOMIC_RELAXED : true); \
+   return atomic_compare_exchange_strong_explicit(&(atomic->a), current, future, orderSwap, orderLoad); \
+ }
+

#define CLANG_ATOMICS_CAS(swiftType, parameterType) \
static __inline__ __attribute__((__always_inline__)) \
__attribute__((overloadable)) \
_Bool CAtomicsCompareAndExchange(swiftType *_Nonnull atomic, parameterType *_Nonnull current, parameterType future, \
                                 enum CASType type, enum MemoryOrder orderSwap, enum LoadMemoryOrder orderLoad) \
{ \
-   assert((unsigned int)orderLoad <= (unsigned int)orderSwap); \
-   assert(orderSwap == __ATOMIC_RELEASE ? orderLoad == __ATOMIC_RELAXED : true); \
  if(type == __ATOMIC_CAS_TYPE_STRONG) \
-     return atomic_compare_exchange_strong_explicit(&(atomic->a), current, future, orderSwap, orderLoad); \
+     return CAtomicsCompareAndExchangeStrong(atomic, current, future, orderSwap, orderLoad); \
  else \
-     return atomic_compare_exchange_weak_explicit(&(atomic->a), current, future, orderSwap, orderLoad); \
+     return CAtomicsCompareAndExchangeWeak(atomic, current, future, orderSwap, orderLoad); \
} \
static __inline__ __attribute__((__always_inline__)) \
__attribute__((overloadable)) \
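
The new Weak/Strong entry points keep the ordering asserts (the load order may be no stronger than the swap order, and a release swap must pair with a relaxed load) while letting callers pick the CAS flavor by name rather than through `enum CASType`. A minimal usage sketch, assuming this header is included as "CAtomics.h" and that the bool generator visible below routes through the updated generator so `AtomicBool` gains these overloads; casting the clang `__ATOMIC_*` constants is safe because the asserts above compare the Swift-facing enums directly against them:

#include <stdbool.h>
#include "CAtomics.h" // assumed include name for this header

// One-shot claim of a flag: strong CAS from false to true.
static bool try_claim(AtomicBool *flag)
{
  _Bool expected = false;
  // acquire on success, relaxed on failure: satisfies both ordering asserts
  return CAtomicsCompareAndExchangeStrong(flag, &expected, true,
                                          (enum MemoryOrder)__ATOMIC_ACQUIRE,
                                          (enum LoadMemoryOrder)__ATOMIC_RELAXED);
}
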
@@ -166,6 +186,8 @@ SWIFT_ENUM(CASType, closed)
CLANG_ATOMICS_LOAD(swiftType, parameterType) \
CLANG_ATOMICS_STORE(swiftType, parameterType) \
CLANG_ATOMICS_SWAP(swiftType, parameterType) \
+ CLANG_ATOMICS_STRONG_CAS(swiftType, parameterType) \
+ CLANG_ATOMICS_WEAK_CAS(swiftType, parameterType) \
CLANG_ATOMICS_CAS(swiftType, parameterType)

// macro to generate atomic struct + functions for integer types
@@ -237,19 +259,41 @@ CLANG_ATOMICS_BOOL_GENERATE(AtomicBool, atomic_bool, _Bool, _Alignof(atomic_bool
parameterType nullability CAtomicsExchange(swiftType *_Nonnull atomic, parameterType nullability value, enum MemoryOrder order) \
{ return (parameterType) atomic_exchange_explicit(&(atomic->a), (uintptr_t)value, order); }

+ #define CLANG_ATOMICS_POINTER_WEAK_CAS(swiftType, parameterType, nullability) \
+ static __inline__ __attribute__((__always_inline__)) \
+ __attribute__((overloadable)) \
+ _Bool CAtomicsCompareAndExchangeWeak(swiftType *_Nonnull atomic, \
+                                      parameterType nullability* _Nonnull current, parameterType nullability future, \
+                                      enum MemoryOrder orderSwap, enum LoadMemoryOrder orderLoad) \
+ { \
+   assert((unsigned int)orderLoad <= (unsigned int)orderSwap); \
+   assert(orderSwap == __ATOMIC_RELEASE ? orderLoad == __ATOMIC_RELAXED : true); \
+   return atomic_compare_exchange_weak_explicit(&(atomic->a), (uintptr_t*)current, (uintptr_t)future, orderSwap, orderLoad); \
+ }
+
+ #define CLANG_ATOMICS_POINTER_STRONG_CAS(swiftType, parameterType, nullability) \
+ static __inline__ __attribute__((__always_inline__)) \
+ __attribute__((overloadable)) \
+ _Bool CAtomicsCompareAndExchangeStrong(swiftType *_Nonnull atomic, \
+                                        parameterType nullability* _Nonnull current, parameterType nullability future, \
+                                        enum MemoryOrder orderSwap, enum LoadMemoryOrder orderLoad) \
+ { \
+   assert((unsigned int)orderLoad <= (unsigned int)orderSwap); \
+   assert(orderSwap == __ATOMIC_RELEASE ? orderLoad == __ATOMIC_RELAXED : true); \
+   return atomic_compare_exchange_strong_explicit(&(atomic->a), (uintptr_t*)current, (uintptr_t)future, orderSwap, orderLoad); \
+ }
+

#define CLANG_ATOMICS_POINTER_CAS(swiftType, parameterType, nullability) \
static __inline__ __attribute__((__always_inline__)) \
__attribute__((overloadable)) \
_Bool CAtomicsCompareAndExchange(swiftType *_Nonnull atomic, \
                                 parameterType nullability* _Nonnull current, parameterType nullability future, \
                                 enum CASType type, enum MemoryOrder orderSwap, enum LoadMemoryOrder orderLoad) \
{ \
-   assert((unsigned int)orderLoad <= (unsigned int)orderSwap); \
-   assert(orderSwap == __ATOMIC_RELEASE ? orderLoad == __ATOMIC_RELAXED : true); \
  if(type == __ATOMIC_CAS_TYPE_STRONG) \
-     return atomic_compare_exchange_strong_explicit(&(atomic->a), (uintptr_t*)current, (uintptr_t)future, orderSwap, orderLoad); \
+     return CAtomicsCompareAndExchangeStrong(atomic, current, future, orderSwap, orderLoad); \
  else \
-     return atomic_compare_exchange_weak_explicit(&(atomic->a), (uintptr_t*)current, (uintptr_t)future, orderSwap, orderLoad); \
+     return CAtomicsCompareAndExchangeWeak(atomic, current, future, orderSwap, orderLoad); \
} \
static __inline__ __attribute__((__always_inline__)) \
__attribute__((overloadable)) \
@@ -271,6 +315,8 @@ CLANG_ATOMICS_BOOL_GENERATE(AtomicBool, atomic_bool, _Bool, _Alignof(atomic_bool
CLANG_ATOMICS_POINTER_LOAD(swiftType, parameterType, nullability) \
CLANG_ATOMICS_POINTER_STORE(swiftType, parameterType, nullability) \
CLANG_ATOMICS_POINTER_SWAP(swiftType, parameterType, nullability) \
+ CLANG_ATOMICS_POINTER_STRONG_CAS(swiftType, parameterType, nullability) \
+ CLANG_ATOMICS_POINTER_WEAK_CAS(swiftType, parameterType, nullability) \
CLANG_ATOMICS_POINTER_CAS(swiftType, parameterType, nullability)

// generate atomic pointer types + functions
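
The pointer overloads work the same way, keeping the `uintptr_t` representation internal to the macro. A sketch of the weak-CAS retry idiom against a generated pointer type; `AtomicOptionalMutableRawPointer` (a `void* _Nullable` payload) is an assumed instantiation name, not shown in this diff:

#include <stddef.h>
#include "CAtomics.h" // assumed include name

// Install `replacement` only if the slot is still NULL; returns the observed
// value (NULL on success). A weak CAS may fail spuriously, hence the loop;
// `expected` is refreshed with the observed value on every failed attempt.
static void *install_if_null(AtomicOptionalMutableRawPointer *slot, void *replacement)
{
  void *expected = NULL;
  while (!CAtomicsCompareAndExchangeWeak(slot, &expected, replacement,
                                         (enum MemoryOrder)__ATOMIC_RELEASE,
                                         (enum LoadMemoryOrder)__ATOMIC_RELAXED))
  {
    if (expected != NULL) break; // another thread won the race
  }
  return expected;
}
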
@@ -345,18 +391,38 @@ CLANG_ATOMICS_POINTER_GENERATE(AtomicOptionalOpaquePointer, atomic_uintptr_t, st
structType CAtomicsExchange(atomicType *_Nonnull atomic, structType value, enum MemoryOrder order) \
{ structType rp; rp.tag_ptr = atomic_exchange_explicit(&(atomic->a), value.tag_ptr, order); return rp; }

+ #define CLANG_ATOMICS_TAGGED_POINTER_WEAK_CAS(swiftType, structType) \
+ static __inline__ __attribute__((__always_inline__)) \
+ __attribute__((overloadable)) \
+ _Bool CAtomicsCompareAndExchangeWeak(swiftType *_Nonnull atomic, structType *_Nonnull current, structType future, \
+                                      enum MemoryOrder orderSwap, enum LoadMemoryOrder orderLoad) \
+ { \
+   assert((unsigned int)orderLoad <= (unsigned int)orderSwap); \
+   assert(orderSwap == __ATOMIC_RELEASE ? orderLoad == __ATOMIC_RELAXED : true); \
+   return atomic_compare_exchange_weak_explicit(&(atomic->a), &(current->tag_ptr), future.tag_ptr, orderSwap, orderLoad); \
+ }
+
+ #define CLANG_ATOMICS_TAGGED_POINTER_STRONG_CAS(swiftType, structType) \
+ static __inline__ __attribute__((__always_inline__)) \
+ __attribute__((overloadable)) \
+ _Bool CAtomicsCompareAndExchangeStrong(swiftType *_Nonnull atomic, structType *_Nonnull current, structType future, \
+                                        enum MemoryOrder orderSwap, enum LoadMemoryOrder orderLoad) \
+ { \
+   assert((unsigned int)orderLoad <= (unsigned int)orderSwap); \
+   assert(orderSwap == __ATOMIC_RELEASE ? orderLoad == __ATOMIC_RELAXED : true); \
+   return atomic_compare_exchange_strong_explicit(&(atomic->a), &(current->tag_ptr), future.tag_ptr, orderSwap, orderLoad); \
+ }
+

#define CLANG_ATOMICS_TAGGED_POINTER_CAS(atomicType, structType) \
static __inline__ __attribute__((__always_inline__)) \
__attribute__((overloadable)) \
_Bool CAtomicsCompareAndExchange(atomicType *_Nonnull atomic, structType *_Nonnull current, structType future, \
                                 enum CASType type, enum MemoryOrder orderSwap, enum LoadMemoryOrder orderLoad) \
{ \
-   assert((unsigned int)orderLoad <= (unsigned int)orderSwap); \
-   assert(orderSwap == __ATOMIC_RELEASE ? orderLoad == __ATOMIC_RELAXED : true); \
  if(type == __ATOMIC_CAS_TYPE_STRONG) \
-     return atomic_compare_exchange_strong_explicit(&(atomic->a), &(current->tag_ptr), future.tag_ptr, orderSwap, orderLoad); \
+     return CAtomicsCompareAndExchangeStrong(atomic, current, future, orderSwap, orderLoad); \
  else \
-     return atomic_compare_exchange_weak_explicit(&(atomic->a), &(current->tag_ptr), future.tag_ptr, orderSwap, orderLoad); \
+     return CAtomicsCompareAndExchangeWeak(atomic, current, future, orderSwap, orderLoad); \
} \
static __inline__ __attribute__((__always_inline__)) \
__attribute__((overloadable)) \
@@ -387,6 +453,8 @@ CLANG_ATOMICS_POINTER_GENERATE(AtomicOptionalOpaquePointer, atomic_uintptr_t, st
CLANG_ATOMICS_TAGGED_POINTER_LOAD(atomicType, structType) \
CLANG_ATOMICS_TAGGED_POINTER_STORE(atomicType, structType) \
CLANG_ATOMICS_TAGGED_POINTER_SWAP(atomicType, structType) \
+ CLANG_ATOMICS_TAGGED_POINTER_STRONG_CAS(atomicType, structType) \
+ CLANG_ATOMICS_TAGGED_POINTER_WEAK_CAS(atomicType, structType) \
CLANG_ATOMICS_TAGGED_POINTER_CAS(atomicType, structType)

CLANG_ATOMICS_TAGGED_POINTER_GENERATE(TaggedRawPointer, const void*, _Nonnull)
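
A sketch of a tag-checked update through the generated overloads. `AtomicTaggedRawPointer` is an assumed name for the atomic counterpart the generator pastes together for the `TaggedRawPointer` instantiation above; only whole-struct values and the `tag_ptr` representation shown in this diff are relied on:

#include "CAtomics.h" // assumed include name

// Replace the current tagged value with `desired` only if it hasn't changed
// since `expected` was read. The tag travels with the pointer, so a pointer
// recycled to the same address but re-tagged will not match (ABA defense).
static _Bool replace_tagged(AtomicTaggedRawPointer *slot,
                            TaggedRawPointer expected, TaggedRawPointer desired)
{
  return CAtomicsCompareAndExchangeStrong(slot, &expected, desired,
                                          (enum MemoryOrder)__ATOMIC_ACQ_REL,
                                          (enum LoadMemoryOrder)__ATOMIC_ACQUIRE);
}
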
@@ -419,17 +487,23 @@ CLANG_ATOMICS_POINTER_SWAP(OpaqueUnmanagedHelper, const void*, _Nullable)
// this should only be used for debugging and testing
CLANG_ATOMICS_POINTER_LOAD(OpaqueUnmanagedHelper, const void*, _Nullable)

+ static __inline__ __attribute__((__always_inline__)) \
+ __attribute__((overloadable)) \
+ _Bool CAtomicsCompareAndExchangeStrong(OpaqueUnmanagedHelper *_Nonnull atomic,
+                                        const void *_Nullable current, const void *_Nullable future,
+                                        enum MemoryOrder order)
+ {
+   uintptr_t pointer = (uintptr_t)current;
+   return atomic_compare_exchange_strong_explicit(&(atomic->a), &pointer, (uintptr_t)future, order, memory_order_relaxed);
+ }
+

static __inline__ __attribute__((__always_inline__)) \
__attribute__((overloadable)) \
_Bool CAtomicsCompareAndExchange(OpaqueUnmanagedHelper *_Nonnull atomic,
                                 const void *_Nullable current, const void *_Nullable future,
                                 enum CASType type, enum MemoryOrder order)
{
-   uintptr_t pointer = (uintptr_t)current;
-   if (type == __ATOMIC_CAS_TYPE_WEAK)
-     return atomic_compare_exchange_weak_explicit(&(atomic->a), &pointer, (uintptr_t)future, order, memory_order_relaxed);
-   else
-     return atomic_compare_exchange_strong_explicit(&(atomic->a), &pointer, (uintptr_t)future, order, memory_order_relaxed);
+   return CAtomicsCompareAndExchangeStrong(atomic, current, future, order);
}

#endif
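
Unlike the macro-generated overloads, this hand-written `CAtomicsCompareAndExchangeStrong` takes `current` by value, so a failed exchange does not report the observed pointer back to the caller; the dispatching `CAtomicsCompareAndExchange` now also ignores `type`, which is sound because a strong CAS is a valid substitute wherever a weak one is allowed. A usage sketch under those semantics:

#include <stddef.h>
#include "CAtomics.h" // assumed include name

// Try to take ownership of an unmanaged reference: succeed only if the slot
// still holds `expected`, atomically replacing it with NULL.
static _Bool take_reference(OpaqueUnmanagedHelper *slot, const void *expected)
{
  return CAtomicsCompareAndExchangeStrong(slot, expected, NULL,
                                          (enum MemoryOrder)__ATOMIC_ACQUIRE);
}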