#ifndef __MBED_ATOMIC_IMPL_H__
#define __MBED_ATOMIC_IMPL_H__

#ifndef __MBED_UTIL_ATOMIC_H__
#error "mbed_atomic_impl.h is designed to be included only by mbed_atomic.h"
#endif

#include <stdint.h>
#include "platform/mbed_assert.h"
#include "platform/mbed_toolchain.h"

#ifdef __cplusplus
extern "C" {
#endif
#ifdef MBED_DEBUG
/* Plain loads must not have "release" or "acquire+release" order */
#define MBED_CHECK_LOAD_ORDER(order) MBED_ASSERT((order) != mbed_memory_order_release && (order) != mbed_memory_order_acq_rel)

/* Plain stores must not have "consume", "acquire" or "acquire+release" order */
#define MBED_CHECK_STORE_ORDER(order) MBED_ASSERT((order) != mbed_memory_order_consume && (order) != mbed_memory_order_acquire && (order) != mbed_memory_order_acq_rel)

/* Compare-exchange failure order must be no stronger than the success order,
 * and must not be "release" or "acquire+release" */
#define MBED_CHECK_CAS_ORDER(success, failure) \
    MBED_ASSERT((failure) <= (success) && (failure) != mbed_memory_order_release && (failure) != mbed_memory_order_acq_rel)
#else
#define MBED_CHECK_LOAD_ORDER(order) (void)0
#define MBED_CHECK_STORE_ORDER(order) (void)0
#define MBED_CHECK_CAS_ORDER(success, failure) (void)0
#endif
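/* Example of what these debug checks catch: a pure load can never have
 * release semantics, so with MBED_DEBUG defined the following (with `x` an
 * assumed uint32_t variable) would trip the assert:
 *
 *     uint32_t v = core_util_atomic_load_explicit_u32(&x, mbed_memory_order_release);
 */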
/* This is currently just to silence unit tests, so no better test required */
#ifdef __MBED__
#define MBED_ATOMIC_PTR_SIZE 32
#else
#define MBED_ATOMIC_PTR_SIZE 64
#endif
/* Barriers implementing the requested memory order around the accesses below */
#define MBED_ACQUIRE_BARRIER(order) do { \
    if ((order) & (mbed_memory_order_consume|mbed_memory_order_acquire)) { \
        MBED_BARRIER(); \
    } } while (0)

#define MBED_RELEASE_BARRIER(order) do { \
    if ((order) & mbed_memory_order_release) { \
        MBED_BARRIER(); \
    } } while (0)

#define MBED_SEQ_CST_BARRIER(order) do { \
    if ((order) == mbed_memory_order_seq_cst) { \
        MBED_BARRIER(); \
    } } while (0)
#if MBED_EXCLUSIVE_ACCESS

/* Lock-free implementations using the ARM exclusive-access instructions
 * LDREX/STREX. Register and constant constraints differ between Thumb-1 and
 * the wider instruction sets, so parameterise them. */
#if MBED_EXCLUSIVE_ACCESS_THUMB1
#define MBED_DOP_REG "l"        // Need low register to get 16-bit 3-op ADD/SUB
#define MBED_CMP_IMM "I"        // CMP 8-bit immediate
#define MBED_SUB3_IMM "L"       // -7 to +7
#else
#define MBED_DOP_REG "r"        // Can use any register
#define MBED_CMP_IMM "IL"       // CMP or CMN immediate
#define MBED_SUB3_IMM "IL"      // SUB or ADD immediate
#endif
#if defined __clang__ || defined __GNUC__
#define DO_MBED_LOCKFREE_EXCHG_ASM(M) \
    __asm volatile ( \
        ".syntax unified\n\t" \
        "LDREX"#M "\t%[oldValue], %[value]\n\t" \
        "STREX"#M "\t%[fail], %[newValue], %[value]\n\t" \
        : [oldValue] "=&r" (oldValue), \
          [fail] "=&r" (fail), \
          [value] "+Q" (*valuePtr) \
        : [newValue] "r" (newValue) \
    )
#elif defined __ICCARM__
/* IAR syntax: memory operands are written as [reg]; a "memory" clobber
 * replaces the "Q" constraint used above. */
#define DO_MBED_LOCKFREE_EXCHG_ASM(M) \
    asm volatile ( \
        "LDREX"#M "\t%[oldValue], [%[valuePtr]]\n" \
        "STREX"#M "\t%[fail], %[newValue], [%[valuePtr]]\n" \
        : [oldValue] "=&r" (oldValue), \
          [fail] "=&r" (fail) \
        : [valuePtr] "r" (valuePtr), \
          [newValue] "r" (newValue) \
        : "memory" \
    )
#endif
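/* How the exclusive pair works: LDREX marks the address for exclusive access
 * and STREX only stores if the monitor is still exclusive, writing 0
 * (success) or 1 (exclusivity lost, e.g. to an interrupt) into `fail`. The
 * wrapper functions below retry until `fail` is 0. */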
#if defined __clang__ || defined __GNUC__
#define DO_MBED_LOCKFREE_NEWVAL_2OP_ASM(OP, Constants, M) \
    __asm volatile ( \
        ".syntax unified\n\t" \
        "LDREX"#M "\t%[newValue], %[value]\n\t" \
        #OP "\t%[newValue], %[arg]\n\t" \
        "STREX"#M "\t%[fail], %[newValue], %[value]\n\t" \
        : [newValue] "=&" MBED_DOP_REG (newValue), \
          [fail] "=&r" (fail), \
          [value] "+Q" (*valuePtr) \
        : [arg] Constants MBED_DOP_REG (arg) \
        : "cc" \
    )
#elif defined __ICCARM__
/* Same sequence in IAR syntax, using the 3-operand form of the data
 * processing instruction. */
#define DO_MBED_LOCKFREE_NEWVAL_2OP_ASM(OP, Constants, M) \
    asm volatile ( \
        "LDREX"#M "\t%[newValue], [%[valuePtr]]\n" \
        #OP "\t%[newValue], %[newValue], %[arg]\n" \
        "STREX"#M "\t%[fail], %[newValue], [%[valuePtr]]\n" \
        : [newValue] "=&r" (newValue), \
          [fail] "=&r" (fail) \
        : [valuePtr] "r" (valuePtr), \
          [arg] "r" (arg) \
        : "memory", "cc" \
    )
#endif
#if defined __clang__ || defined __GNUC__
#define DO_MBED_LOCKFREE_OLDVAL_3OP_ASM(OP, Constants, M) \
    __asm volatile ( \
        ".syntax unified\n\t" \
        "LDREX"#M "\t%[oldValue], %[value]\n\t" \
        #OP "\t%[newValue], %[oldValue], %[arg]\n\t" \
        "STREX"#M "\t%[fail], %[newValue], %[value]\n\t" \
        : [oldValue] "=&" MBED_DOP_REG (oldValue), \
          [newValue] "=&" MBED_DOP_REG (newValue), \
          [fail] "=&r" (fail), \
          [value] "+Q" (*valuePtr) \
        : [arg] Constants MBED_DOP_REG (arg) \
        : "cc" \
    )
#elif defined __ICCARM__
#define DO_MBED_LOCKFREE_OLDVAL_3OP_ASM(OP, Constants, M) \
    asm volatile ( \
        "LDREX"#M "\t%[oldValue], [%[valuePtr]]\n" \
        #OP "\t%[newValue], %[oldValue], %[arg]\n" \
        "STREX"#M "\t%[fail], %[newValue], [%[valuePtr]]\n" \
        : [oldValue] "=&r" (oldValue), \
          [newValue] "=&r" (newValue), \
          [fail] "=&r" (fail) \
        : [valuePtr] "r" (valuePtr), \
          [arg] "r" (arg) \
        : "memory", "cc" \
    )
#endif
/* Thumb-1 only has 2-operand ANDS/ORRS/EORS, so keep the old value by
 * copying it with a MOV before operating on the copy. */
#if defined __clang__ || defined __GNUC__
#define DO_MBED_LOCKFREE_OLDVAL_2OP_ASM(OP, Constants, M) \
    __asm volatile ( \
        ".syntax unified\n\t" \
        "LDREX"#M "\t%[oldValue], %[value]\n\t" \
        "MOV" "\t%[newValue], %[oldValue]\n\t" \
        #OP "\t%[newValue], %[arg]\n\t" \
        "STREX"#M "\t%[fail], %[newValue], %[value]\n\t" \
        : [oldValue] "=&r" (oldValue), \
          [newValue] "=&l" (newValue), \
          [fail] "=&r" (fail), \
          [value] "+Q" (*valuePtr) \
        : [arg] Constants "l" (arg) \
        : "cc" \
    )
#elif defined __ICCARM__
#define DO_MBED_LOCKFREE_OLDVAL_2OP_ASM(OP, Constants, M) \
    asm volatile ( \
        "LDREX"#M "\t%[oldValue], [%[valuePtr]]\n" \
        "MOV" "\t%[newValue], %[oldValue]\n" \
        #OP "\t%[newValue], %[arg]\n" \
        "STREX"#M "\t%[fail], %[newValue], [%[valuePtr]]\n" \
        : [oldValue] "=&r" (oldValue), \
          [newValue] "=&r" (newValue), \
          [fail] "=&r" (fail) \
        : [valuePtr] "r" (valuePtr), \
          [arg] "r" (arg) \
        : "memory", "cc" \
    )
#endif
/* ARM mode (and only ARM mode) can predicate the STREX on the comparison
 * result, avoiding a branch. */
#if MBED_EXCLUSIVE_ACCESS_ARM
#if defined __clang__ || defined __GNUC__
#define DO_MBED_LOCKFREE_CAS_WEAK_ASM(M) \
    __asm volatile ( \
        ".syntax unified\n\t" \
        "LDREX"#M "\t%[oldValue], %[value]\n\t" \
        "SUBS" "\t%[fail], %[oldValue], %[expectedValue]\n\t" \
        "STREX"#M"EQ\t%[fail], %[desiredValue], %[value]\n\t" \
        : [oldValue] "=&r" (oldValue), \
          [fail] "=&r" (fail), \
          [value] "+Q" (*ptr) \
        : [desiredValue] "r" (desiredValue), \
          [expectedValue] "ILr" (expectedValue) \
        : "cc" \
    )
#elif defined __ICCARM__
#define DO_MBED_LOCKFREE_CAS_WEAK_ASM(M) \
    asm volatile ( \
        "LDREX"#M "\t%[oldValue], [%[valuePtr]]\n" \
        "SUBS" "\t%[fail], %[oldValue], %[expectedValue]\n" \
        "STREX"#M"EQ\t%[fail], %[desiredValue], [%[valuePtr]]\n" \
        : [oldValue] "=&r" (oldValue), \
          [fail] "=&r" (fail) \
        : [desiredValue] "r" (desiredValue), \
          [expectedValue] "r" (expectedValue), \
          [valuePtr] "r" (ptr) \
        : "memory", "cc" \
    )
#endif
#else // MBED_EXCLUSIVE_ACCESS_ARM
/* Thumb code instead branches around the STREX when the comparison fails. */
#if defined __clang__ || defined __GNUC__
#define DO_MBED_LOCKFREE_CAS_WEAK_ASM(M) \
    __asm volatile ( \
        ".syntax unified\n\t" \
        "LDREX"#M "\t%[oldValue], %[value]\n\t" \
        "SUBS" "\t%[fail], %[oldValue], %[expectedValue]\n\t" \
        "BNE" "\t1f\n\t" \
        "STREX"#M "\t%[fail], %[desiredValue], %[value]\n" \
        "1:" \
        : [oldValue] "=&" MBED_DOP_REG (oldValue), \
          [fail] "=&" MBED_DOP_REG (fail), \
          [value] "+Q" (*ptr) \
        : [desiredValue] "r" (desiredValue), \
          [expectedValue] MBED_SUB3_IMM MBED_DOP_REG (expectedValue) \
        : "cc" \
    )
#elif defined __ICCARM__
#define DO_MBED_LOCKFREE_CAS_WEAK_ASM(M) \
    asm volatile ( \
        "LDREX"#M "\t%[oldValue], [%[valuePtr]]\n" \
        "SUBS" "\t%[fail], %[oldValue], %[expectedValue]\n" \
        "BNE.N" "\tdone\n\t" \
        "STREX"#M "\t%[fail], %[desiredValue], [%[valuePtr]]\n" \
        "done:" \
        : [oldValue] "=&r" (oldValue), \
          [fail] "=&r" (fail) \
        : [desiredValue] "r" (desiredValue), \
          [expectedValue] "r" (expectedValue), \
          [valuePtr] "r" (ptr) \
        : "memory", "cc" \
    )
#endif
#endif // MBED_EXCLUSIVE_ACCESS_ARM
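/* Note the weak form does not retry when only the exclusive store fails:
 * `fail` is simply left non-zero and the wrapper reports failure even though
 * the value may have matched. That spurious failure is exactly what
 * compare_exchange_weak is permitted to do, and callers loop anyway. */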
/* For the strong CAS, retry the whole load/compare/store inside the
 * assembler whenever only the exclusive store failed. */
#if defined __clang__ || defined __GNUC__
#define DO_MBED_LOCKFREE_CAS_STRONG_ASM(M) \
    __asm volatile ( \
        ".syntax unified\n\t" \
        "\n1:\n\t" \
        "LDREX"#M "\t%[oldValue], %[value]\n\t" \
        "SUBS" "\t%[fail], %[oldValue], %[expectedValue]\n\t" \
        "BNE" "\t2f\n\t" \
        "STREX"#M "\t%[fail], %[desiredValue], %[value]\n\t" \
        "CMP" "\t%[fail], #0\n\t" \
        "BNE" "\t1b\n\t" \
        "2:" \
        : [oldValue] "=&" MBED_DOP_REG (oldValue), \
          [fail] "=&" MBED_DOP_REG (fail), \
          [value] "+Q" (*ptr) \
        : [desiredValue] "r" (desiredValue), \
          [expectedValue] MBED_SUB3_IMM MBED_DOP_REG (expectedValue) \
        : "cc" \
    )
#elif defined __ICCARM__
#define DO_MBED_LOCKFREE_CAS_STRONG_ASM(M) \
    asm volatile ( \
        "retry:\n" \
        "LDREX"#M "\t%[oldValue], [%[valuePtr]]\n" \
        "SUBS" "\t%[fail], %[oldValue], %[expectedValue]\n" \
        "BNE.N" "\tdone\n\t" \
        "STREX"#M "\t%[fail], %[desiredValue], [%[valuePtr]]\n" \
        "CMP" "\t%[fail], #0\n" \
        "BNE.N" "\tretry\n" \
        "done:" \
        : [oldValue] "=&r" (oldValue), \
          [fail] "=&r" (fail) \
        : [desiredValue] "r" (desiredValue), \
          [expectedValue] "r" (expectedValue), \
          [valuePtr] "r" (ptr) \
        : "memory", "cc" \
    )
#endif
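/* The strong form differs by looping inside the assembler: a comparison
 * mismatch exits forward with `fail` non-zero, but a failed STREX branches
 * back to the LDREX, so failure is only ever reported when the value really
 * differed from the expected one. */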
/********************* LOCK-FREE IMPLEMENTATION MACROS ****************/

#define DO_MBED_LOCKFREE_EXCHG_OP(T, fn_suffix, M) \
inline T core_util_atomic_exchange_##fn_suffix(volatile T *valuePtr, T newValue) \
{ \
    T oldValue; \
    uint32_t fail; \
    MBED_BARRIER(); \
    do { \
        DO_MBED_LOCKFREE_EXCHG_ASM(M); \
    } while (fail); \
    MBED_BARRIER(); \
    return oldValue; \
} \
 \
MBED_FORCEINLINE T core_util_atomic_exchange_explicit_##fn_suffix( \
    volatile T *valuePtr, T newValue, mbed_memory_order order) \
{ \
    T oldValue; \
    uint32_t fail; \
    MBED_RELEASE_BARRIER(order); \
    do { \
        DO_MBED_LOCKFREE_EXCHG_ASM(M); \
    } while (fail); \
    MBED_ACQUIRE_BARRIER(order); \
    return oldValue; \
}
#define DO_MBED_LOCKFREE_CAS_WEAK_OP(T, fn_suffix, M) \
inline bool core_util_atomic_compare_exchange_weak_##fn_suffix(volatile T *ptr, T *expectedCurrentValue, T desiredValue) \
{ \
    MBED_BARRIER(); \
    T oldValue; \
    uint32_t fail, expectedValue = *expectedCurrentValue; \
    DO_MBED_LOCKFREE_CAS_WEAK_ASM(M); \
    if (fail) { \
        *expectedCurrentValue = oldValue; \
    } \
    MBED_BARRIER(); \
    return !fail; \
} \
 \
MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_##fn_suffix(volatile T *ptr, T *expectedCurrentValue, T desiredValue, mbed_memory_order success, mbed_memory_order failure) \
{ \
    MBED_CHECK_CAS_ORDER(success, failure); \
    MBED_RELEASE_BARRIER(success); \
    T oldValue; \
    uint32_t fail, expectedValue = *expectedCurrentValue; \
    DO_MBED_LOCKFREE_CAS_WEAK_ASM(M); \
    if (fail) { \
        *expectedCurrentValue = oldValue; \
    } \
    MBED_ACQUIRE_BARRIER(fail ? failure : success); \
    return !fail; \
}
#define DO_MBED_LOCKFREE_CAS_STRONG_OP(T, fn_suffix, M) \
inline bool core_util_atomic_cas_##fn_suffix(volatile T *ptr, T *expectedCurrentValue, T desiredValue) \
{ \
    MBED_BARRIER(); \
    T oldValue; \
    uint32_t fail, expectedValue = *expectedCurrentValue; \
    DO_MBED_LOCKFREE_CAS_STRONG_ASM(M); \
    if (fail) { \
        *expectedCurrentValue = oldValue; \
    } \
    MBED_BARRIER(); \
    return !fail; \
} \
 \
MBED_FORCEINLINE bool core_util_atomic_cas_explicit_##fn_suffix(volatile T *ptr, T *expectedCurrentValue, T desiredValue, mbed_memory_order success, mbed_memory_order failure) \
{ \
    MBED_CHECK_CAS_ORDER(success, failure); \
    MBED_RELEASE_BARRIER(success); \
    T oldValue; \
    uint32_t fail, expectedValue = *expectedCurrentValue; \
    DO_MBED_LOCKFREE_CAS_STRONG_ASM(M); \
    if (fail) { \
        *expectedCurrentValue = oldValue; \
    } \
    MBED_ACQUIRE_BARRIER(fail ? failure : success); \
    return !fail; \
}
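/* Illustrative caller pattern (a sketch, not part of this header): the
 * strong CAS in a read-modify-write loop, with `counter` an assumed
 * uint32_t variable.
 *
 *     uint32_t expected = core_util_atomic_load_u32(&counter);
 *     while (!core_util_atomic_cas_u32(&counter, &expected, expected + 1)) {
 *         // `expected` now holds the freshly-observed value; go round again
 *     }
 */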
#define DO_MBED_LOCKFREE_NEWVAL_2OP(name, OP, Constants, T, fn_suffix, M) \
inline T core_util_atomic_##name##_##fn_suffix(volatile T *valuePtr, T arg) \
{ \
    uint32_t fail, newValue; \
    MBED_BARRIER(); \
    do { \
        DO_MBED_LOCKFREE_NEWVAL_2OP_ASM(OP, Constants, M); \
    } while (fail); \
    MBED_BARRIER(); \
    return (T) newValue; \
} \
 \
MBED_FORCEINLINE T core_util_atomic_##name##_explicit_##fn_suffix( \
    volatile T *valuePtr, T arg, mbed_memory_order order) \
{ \
    uint32_t fail, newValue; \
    MBED_RELEASE_BARRIER(order); \
    do { \
        DO_MBED_LOCKFREE_NEWVAL_2OP_ASM(OP, Constants, M); \
    } while (fail); \
    MBED_ACQUIRE_BARRIER(order); \
    return (T) newValue; \
}
#define DO_MBED_LOCKFREE_OLDVAL_2OP(name, OP, Constants, T, fn_suffix, M) \
inline T core_util_atomic_##name##_##fn_suffix(volatile T *valuePtr, T arg) \
{ \
    T oldValue; \
    uint32_t fail, newValue; \
    MBED_BARRIER(); \
    do { \
        DO_MBED_LOCKFREE_OLDVAL_2OP_ASM(OP, Constants, M); \
    } while (fail); \
    MBED_BARRIER(); \
    return oldValue; \
} \
 \
MBED_FORCEINLINE T core_util_atomic_##name##_explicit_##fn_suffix( \
    volatile T *valuePtr, T arg, mbed_memory_order order) \
{ \
    T oldValue; \
    uint32_t fail, newValue; \
    MBED_RELEASE_BARRIER(order); \
    do { \
        DO_MBED_LOCKFREE_OLDVAL_2OP_ASM(OP, Constants, M); \
    } while (fail); \
    MBED_ACQUIRE_BARRIER(order); \
    return oldValue; \
}
#define DO_MBED_LOCKFREE_OLDVAL_3OP(name, OP, Constants, T, fn_suffix, M) \
inline T core_util_atomic_##name##_##fn_suffix(volatile T *valuePtr, T arg) \
{ \
    T oldValue; \
    uint32_t fail, newValue; \
    MBED_BARRIER(); \
    do { \
        DO_MBED_LOCKFREE_OLDVAL_3OP_ASM(OP, Constants, M); \
    } while (fail); \
    MBED_BARRIER(); \
    return oldValue; \
} \
 \
MBED_FORCEINLINE T core_util_atomic_##name##_explicit_##fn_suffix( \
    volatile T *valuePtr, T arg, mbed_memory_order order) \
{ \
    T oldValue; \
    uint32_t fail, newValue; \
    MBED_RELEASE_BARRIER(order); \
    do { \
        DO_MBED_LOCKFREE_OLDVAL_3OP_ASM(OP, Constants, M); \
    } while (fail); \
    MBED_ACQUIRE_BARRIER(order); \
    return oldValue; \
}
/********************* ATOMIC FLAG ****************/

inline bool core_util_atomic_flag_test_and_set(volatile core_util_atomic_flag *flagPtr)
{
    MBED_BARRIER();
    bool oldValue, newValue = true;
    uint32_t fail;
    volatile uint8_t *valuePtr = &flagPtr->_flag;
    do {
        DO_MBED_LOCKFREE_EXCHG_ASM(B);
    } while (fail);
    MBED_BARRIER();
    return oldValue;
}

MBED_FORCEINLINE bool core_util_atomic_flag_test_and_set_explicit(volatile core_util_atomic_flag *flagPtr, mbed_memory_order order)
{
    MBED_RELEASE_BARRIER(order);
    bool oldValue, newValue = true;
    uint32_t fail;
    volatile uint8_t *valuePtr = &flagPtr->_flag;
    do {
        DO_MBED_LOCKFREE_EXCHG_ASM(B);
    } while (fail);
    MBED_ACQUIRE_BARRIER(order);
    return oldValue;
}
#define DO_MBED_LOCKFREE_EXCHG_OPS() \
    DO_MBED_LOCKFREE_EXCHG_OP(uint8_t, u8, B) \
    DO_MBED_LOCKFREE_EXCHG_OP(uint16_t, u16, H) \
    DO_MBED_LOCKFREE_EXCHG_OP(uint32_t, u32, )

#define DO_MBED_LOCKFREE_NEWVAL_2OPS(name, OP, Constants) \
    DO_MBED_LOCKFREE_NEWVAL_2OP(name, OP, Constants, uint8_t, u8, B) \
    DO_MBED_LOCKFREE_NEWVAL_2OP(name, OP, Constants, uint16_t, u16, H) \
    DO_MBED_LOCKFREE_NEWVAL_2OP(name, OP, Constants, uint32_t, u32, )

#define DO_MBED_LOCKFREE_OLDVAL_3OPS(name, OP, Constants) \
    DO_MBED_LOCKFREE_OLDVAL_3OP(name, OP, Constants, uint8_t, u8, B) \
    DO_MBED_LOCKFREE_OLDVAL_3OP(name, OP, Constants, uint16_t, u16, H) \
    DO_MBED_LOCKFREE_OLDVAL_3OP(name, OP, Constants, uint32_t, u32, )

#define DO_MBED_LOCKFREE_OLDVAL_2OPS(name, OP, Constants) \
    DO_MBED_LOCKFREE_OLDVAL_2OP(name, OP, Constants, uint8_t, u8, B) \
    DO_MBED_LOCKFREE_OLDVAL_2OP(name, OP, Constants, uint16_t, u16, H) \
    DO_MBED_LOCKFREE_OLDVAL_2OP(name, OP, Constants, uint32_t, u32, )

#define DO_MBED_LOCKFREE_CAS_STRONG_OPS() \
    DO_MBED_LOCKFREE_CAS_STRONG_OP(uint8_t, u8, B) \
    DO_MBED_LOCKFREE_CAS_STRONG_OP(uint16_t, u16, H) \
    DO_MBED_LOCKFREE_CAS_STRONG_OP(uint32_t, u32, )

#define DO_MBED_LOCKFREE_CAS_WEAK_OPS() \
    DO_MBED_LOCKFREE_CAS_WEAK_OP(uint8_t, u8, B) \
    DO_MBED_LOCKFREE_CAS_WEAK_OP(uint16_t, u16, H) \
    DO_MBED_LOCKFREE_CAS_WEAK_OP(uint32_t, u32, )
/********************* LOCK-FREE IMPLEMENTATION INSTANTIATION ****************/

#if !MBED_EXCLUSIVE_ACCESS_THUMB1
/* Thumb-2/ARM: the constraint strings select the legal immediate constants
 * ("I" = data-processing immediate, "L" = negated, "K" = inverted). */
DO_MBED_LOCKFREE_OLDVAL_3OPS(fetch_add, ADDS, "IL")
DO_MBED_LOCKFREE_NEWVAL_2OPS(incr, ADDS, "IL")
DO_MBED_LOCKFREE_OLDVAL_3OPS(fetch_sub, SUBS, "IL")
DO_MBED_LOCKFREE_NEWVAL_2OPS(decr, SUBS, "IL")
/* AND can use BIC for inverted constants, so "K" is legal too */
DO_MBED_LOCKFREE_OLDVAL_3OPS(fetch_and, ANDS, "IK")
DO_MBED_LOCKFREE_NEWVAL_2OPS(and_fetch, ANDS, "IK")
#if MBED_EXCLUSIVE_ACCESS_ARM
/* ARM mode has no ORN instruction, so plain immediates only for OR */
DO_MBED_LOCKFREE_OLDVAL_3OPS(fetch_or, ORRS, "I")
DO_MBED_LOCKFREE_NEWVAL_2OPS(or_fetch, ORRS, "I")
#else
/* Thumb-2 has ORN, so inverted constants are available for OR */
DO_MBED_LOCKFREE_OLDVAL_3OPS(fetch_or, ORRS, "IK")
DO_MBED_LOCKFREE_NEWVAL_2OPS(or_fetch, ORRS, "IK")
#endif
/* EOR has no inverted form, so plain immediates only */
DO_MBED_LOCKFREE_OLDVAL_3OPS(fetch_xor, EORS, "I")
DO_MBED_LOCKFREE_NEWVAL_2OPS(xor_fetch, EORS, "I")
#else // MBED_EXCLUSIVE_ACCESS_THUMB1
/* Thumb-1: very limited immediate ranges, and the logical operations have no
 * immediate forms at all. */
DO_MBED_LOCKFREE_OLDVAL_3OPS(fetch_add, ADDS, "L")
DO_MBED_LOCKFREE_NEWVAL_2OPS(incr, ADDS, "IJ")
DO_MBED_LOCKFREE_OLDVAL_3OPS(fetch_sub, SUBS, "L")
DO_MBED_LOCKFREE_NEWVAL_2OPS(decr, SUBS, "IJ")
DO_MBED_LOCKFREE_OLDVAL_2OPS(fetch_and, ANDS, "")
DO_MBED_LOCKFREE_NEWVAL_2OPS(and_fetch, ANDS, "")
DO_MBED_LOCKFREE_OLDVAL_2OPS(fetch_or, ORRS, "")
DO_MBED_LOCKFREE_NEWVAL_2OPS(or_fetch, ORRS, "")
DO_MBED_LOCKFREE_OLDVAL_2OPS(fetch_xor, EORS, "")
DO_MBED_LOCKFREE_NEWVAL_2OPS(xor_fetch, EORS, "")
#endif

DO_MBED_LOCKFREE_EXCHG_OPS()
DO_MBED_LOCKFREE_CAS_STRONG_OPS()
DO_MBED_LOCKFREE_CAS_WEAK_OPS()
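/* The instantiations above expand to concrete functions such as
 * core_util_atomic_fetch_add_u32() or core_util_atomic_or_fetch_u8(), e.g.
 * (sketch, with `flags` an assumed volatile uint8_t):
 *
 *     uint8_t previous = core_util_atomic_fetch_or_u8(&flags, 0x01);
 */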
/* With exclusive access, only the 64-bit operations need locking */
#define DO_MBED_LOCKED_FETCH_OP_ORDERINGS(name) \
    DO_MBED_LOCKED_FETCH_OP_ORDERING(name, uint64_t, u64)
#define DO_MBED_LOCKED_CAS_ORDERINGS(name) \
    DO_MBED_LOCKED_CAS_ORDERING(name, uint64_t, u64)
#else // MBED_EXCLUSIVE_ACCESS
/* All the operations are locked, so they need no ordering parameters */
#define DO_MBED_LOCKED_FETCH_OP_ORDERINGS(name) \
    DO_MBED_LOCKED_FETCH_OP_ORDERING(name, uint8_t, u8) \
    DO_MBED_LOCKED_FETCH_OP_ORDERING(name, uint16_t, u16) \
    DO_MBED_LOCKED_FETCH_OP_ORDERING(name, uint32_t, u32) \
    DO_MBED_LOCKED_FETCH_OP_ORDERING(name, uint64_t, u64)
#define DO_MBED_LOCKED_CAS_ORDERINGS(name) \
    DO_MBED_LOCKED_CAS_ORDERING(name, uint8_t, u8) \
    DO_MBED_LOCKED_CAS_ORDERING(name, uint16_t, u16) \
    DO_MBED_LOCKED_CAS_ORDERING(name, uint32_t, u32) \
    DO_MBED_LOCKED_CAS_ORDERING(name, uint64_t, u64)
#endif // MBED_EXCLUSIVE_ACCESS
/* Lock-free loads and stores don't need assembler - the architecture
 * guarantees atomicity of naturally-aligned accesses up to 32 bits, and the
 * barriers provide the ordering. */
#define DO_MBED_LOCKFREE_LOADSTORE(T, V, fn_suffix) \
MBED_FORCEINLINE T core_util_atomic_load_##fn_suffix(T const V *valuePtr) \
{ \
    T value = *valuePtr; \
    MBED_BARRIER(); \
    return value; \
} \
 \
MBED_FORCEINLINE T core_util_atomic_load_explicit_##fn_suffix(T const V *valuePtr, mbed_memory_order order) \
{ \
    MBED_CHECK_LOAD_ORDER(order); \
    T value = *valuePtr; \
    MBED_ACQUIRE_BARRIER(order); \
    return value; \
} \
 \
MBED_FORCEINLINE void core_util_atomic_store_##fn_suffix(T V *valuePtr, T value) \
{ \
    MBED_BARRIER(); \
    *valuePtr = value; \
    MBED_BARRIER(); \
} \
 \
MBED_FORCEINLINE void core_util_atomic_store_explicit_##fn_suffix(T V *valuePtr, T value, mbed_memory_order order) \
{ \
    MBED_CHECK_STORE_ORDER(order); \
    MBED_RELEASE_BARRIER(order); \
    *valuePtr = value; \
    MBED_SEQ_CST_BARRIER(order); \
}
MBED_FORCEINLINE void core_util_atomic_flag_clear(volatile core_util_atomic_flag *flagPtr)
{
    MBED_BARRIER();
    flagPtr->_flag = false;
    MBED_BARRIER();
}

MBED_FORCEINLINE void core_util_atomic_flag_clear_explicit(volatile core_util_atomic_flag *flagPtr, mbed_memory_order order)
{
    MBED_CHECK_STORE_ORDER(order);
    MBED_RELEASE_BARRIER(order);
    flagPtr->_flag = false;
    MBED_SEQ_CST_BARRIER(order);
}

#ifdef __cplusplus
// Temporarily turn off extern "C", so we can provide non-volatile
// overloads for C++
} // extern "C"

MBED_FORCEINLINE void core_util_atomic_flag_clear(core_util_atomic_flag *flagPtr) noexcept
{
    MBED_BARRIER();
    flagPtr->_flag = false;
    MBED_BARRIER();
}

MBED_FORCEINLINE void core_util_atomic_flag_clear_explicit(core_util_atomic_flag *flagPtr, mbed_memory_order order) noexcept
{
    MBED_CHECK_STORE_ORDER(order);
    MBED_RELEASE_BARRIER(order);
    flagPtr->_flag = false;
    MBED_SEQ_CST_BARRIER(order);
}
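/* Illustrative use of the flag as a crude test-and-set lock (a sketch, not
 * part of this header; busy-waiting is rarely right on a single core):
 *
 *     static core_util_atomic_flag lock = CORE_UTIL_ATOMIC_FLAG_INIT;
 *     while (core_util_atomic_flag_test_and_set(&lock)) {
 *         // spin until the holder calls core_util_atomic_flag_clear(&lock)
 *     }
 *     // ... critical section ...
 *     core_util_atomic_flag_clear(&lock);
 */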
DO_MBED_LOCKFREE_LOADSTORE(uint8_t,, u8)
DO_MBED_LOCKFREE_LOADSTORE(uint16_t,, u16)
DO_MBED_LOCKFREE_LOADSTORE(uint32_t,, u32)
DO_MBED_LOCKFREE_LOADSTORE(int8_t,, s8)
DO_MBED_LOCKFREE_LOADSTORE(int16_t,, s16)
DO_MBED_LOCKFREE_LOADSTORE(int32_t,, s32)
DO_MBED_LOCKFREE_LOADSTORE(bool,, bool)
DO_MBED_LOCKFREE_LOADSTORE(void *,, ptr)

extern "C" {
#endif // __cplusplus

DO_MBED_LOCKFREE_LOADSTORE(uint8_t, volatile, u8)
DO_MBED_LOCKFREE_LOADSTORE(uint16_t, volatile, u16)
DO_MBED_LOCKFREE_LOADSTORE(uint32_t, volatile, u32)
DO_MBED_LOCKFREE_LOADSTORE(int8_t, volatile, s8)
DO_MBED_LOCKFREE_LOADSTORE(int16_t, volatile, s16)
DO_MBED_LOCKFREE_LOADSTORE(int32_t, volatile, s32)
DO_MBED_LOCKFREE_LOADSTORE(bool, volatile, bool)
DO_MBED_LOCKFREE_LOADSTORE(void *, volatile, ptr)
/* The signed, bool and pointer operations below are implemented by casting
 * to the corresponding unsigned form. */
#define DO_MBED_SIGNED_CAS_OP(name, T, fn_suffix) \
MBED_FORCEINLINE bool core_util_atomic_##name##_s##fn_suffix(volatile T *ptr, \
        T *expectedCurrentValue, T desiredValue) \
{ \
    return core_util_atomic_##name##_u##fn_suffix((volatile u##T *)ptr, \
            (u##T *)expectedCurrentValue, (u##T)desiredValue); \
} \
 \
MBED_FORCEINLINE bool core_util_atomic_##name##_explicit_s##fn_suffix(volatile T *ptr, \
        T *expectedCurrentValue, T desiredValue, \
        mbed_memory_order success, mbed_memory_order failure) \
{ \
    return core_util_atomic_##name##_explicit_u##fn_suffix((volatile u##T *)ptr, \
            (u##T *)expectedCurrentValue, (u##T)desiredValue, success, failure); \
}

#define DO_MBED_SIGNED_CAS_OPS(name) \
    DO_MBED_SIGNED_CAS_OP(name, int8_t, 8) \
    DO_MBED_SIGNED_CAS_OP(name, int16_t, 16) \
    DO_MBED_SIGNED_CAS_OP(name, int32_t, 32) \
    DO_MBED_SIGNED_CAS_OP(name, int64_t, 64)
DO_MBED_SIGNED_CAS_OPS(cas)
DO_MBED_SIGNED_CAS_OPS(compare_exchange_weak)
MBED_FORCEINLINE bool core_util_atomic_cas_bool(volatile bool *ptr, bool *expectedCurrentValue, bool desiredValue)
{
    return core_util_atomic_cas_u8((volatile uint8_t *)ptr, (uint8_t *)expectedCurrentValue, desiredValue);
}

MBED_FORCEINLINE bool core_util_atomic_cas_explicit_bool(volatile bool *ptr, bool *expectedCurrentValue, bool desiredValue, mbed_memory_order success, mbed_memory_order failure)
{
    return core_util_atomic_cas_explicit_u8((volatile uint8_t *)ptr, (uint8_t *)expectedCurrentValue, desiredValue, success, failure);
}
MBED_FORCEINLINE bool core_util_atomic_cas_ptr(void *volatile *ptr, void **expectedCurrentValue, void *desiredValue)
{
#if MBED_ATOMIC_PTR_SIZE == 32
    return core_util_atomic_cas_u32(
               (volatile uint32_t *)ptr,
               (uint32_t *)expectedCurrentValue,
               (uint32_t)desiredValue);
#else
    return core_util_atomic_cas_u64(
               (volatile uint64_t *)ptr,
               (uint64_t *)expectedCurrentValue,
               (uint64_t)desiredValue);
#endif
}
MBED_FORCEINLINE bool core_util_atomic_cas_explicit_ptr(void *volatile *ptr, void **expectedCurrentValue, void *desiredValue, mbed_memory_order success, mbed_memory_order failure)
{
#if MBED_ATOMIC_PTR_SIZE == 32
    return core_util_atomic_cas_explicit_u32(
               (volatile uint32_t *)ptr,
               (uint32_t *)expectedCurrentValue,
               (uint32_t)desiredValue,
               success, failure);
#else
    return core_util_atomic_cas_explicit_u64(
               (volatile uint64_t *)ptr,
               (uint64_t *)expectedCurrentValue,
               (uint64_t)desiredValue,
               success, failure);
#endif
}
MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_bool(volatile bool *ptr, bool *expectedCurrentValue, bool desiredValue)
{
    return core_util_atomic_compare_exchange_weak_u8((volatile uint8_t *)ptr, (uint8_t *)expectedCurrentValue, desiredValue);
}

MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_bool(volatile bool *ptr, bool *expectedCurrentValue, bool desiredValue, mbed_memory_order success, mbed_memory_order failure)
{
    return core_util_atomic_compare_exchange_weak_explicit_u8((volatile uint8_t *)ptr, (uint8_t *)expectedCurrentValue, desiredValue, success, failure);
}
MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_ptr(void *volatile *ptr, void **expectedCurrentValue, void *desiredValue)
{
#if MBED_ATOMIC_PTR_SIZE == 32
    return core_util_atomic_compare_exchange_weak_u32(
               (volatile uint32_t *)ptr,
               (uint32_t *)expectedCurrentValue,
               (uint32_t)desiredValue);
#else
    return core_util_atomic_compare_exchange_weak_u64(
               (volatile uint64_t *)ptr,
               (uint64_t *)expectedCurrentValue,
               (uint64_t)desiredValue);
#endif
}
MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_ptr(void *volatile *ptr, void **expectedCurrentValue, void *desiredValue, mbed_memory_order success, mbed_memory_order failure)
{
#if MBED_ATOMIC_PTR_SIZE == 32
    return core_util_atomic_compare_exchange_weak_explicit_u32(
               (volatile uint32_t *)ptr,
               (uint32_t *)expectedCurrentValue,
               (uint32_t)desiredValue,
               success, failure);
#else
    return core_util_atomic_compare_exchange_weak_explicit_u64(
               (volatile uint64_t *)ptr,
               (uint64_t *)expectedCurrentValue,
               (uint64_t)desiredValue,
               success, failure);
#endif
}
#define DO_MBED_SIGNED_FETCH_OP(name, T, fn_suffix) \
MBED_FORCEINLINE T core_util_atomic_##name##_s##fn_suffix(volatile T *valuePtr, T arg) \
{ \
    return (T)core_util_atomic_##name##_u##fn_suffix((volatile u##T *)valuePtr, (u##T)arg); \
}

#define DO_MBED_SIGNED_EXPLICIT_FETCH_OP(name, T, fn_suffix) \
MBED_FORCEINLINE T core_util_atomic_##name##_explicit_s##fn_suffix(volatile T *valuePtr, T arg, mbed_memory_order order) \
{ \
    return (T)core_util_atomic_##name##_explicit_u##fn_suffix((volatile u##T *)valuePtr, (u##T)arg, order); \
}

#define DO_MBED_SIGNED_FETCH_OPS(name) \
    DO_MBED_SIGNED_FETCH_OP(name, int8_t, 8) \
    DO_MBED_SIGNED_FETCH_OP(name, int16_t, 16) \
    DO_MBED_SIGNED_FETCH_OP(name, int32_t, 32) \
    DO_MBED_SIGNED_FETCH_OP(name, int64_t, 64)

#define DO_MBED_SIGNED_EXPLICIT_FETCH_OPS(name) \
    DO_MBED_SIGNED_EXPLICIT_FETCH_OP(name, int8_t, 8) \
    DO_MBED_SIGNED_EXPLICIT_FETCH_OP(name, int16_t, 16) \
    DO_MBED_SIGNED_EXPLICIT_FETCH_OP(name, int32_t, 32) \
    DO_MBED_SIGNED_EXPLICIT_FETCH_OP(name, int64_t, 64)
DO_MBED_SIGNED_FETCH_OPS(exchange)
DO_MBED_SIGNED_FETCH_OPS(incr)
DO_MBED_SIGNED_FETCH_OPS(decr)
DO_MBED_SIGNED_FETCH_OPS(fetch_add)
DO_MBED_SIGNED_FETCH_OPS(fetch_sub)

DO_MBED_SIGNED_EXPLICIT_FETCH_OPS(exchange)
DO_MBED_SIGNED_EXPLICIT_FETCH_OPS(fetch_add)
DO_MBED_SIGNED_EXPLICIT_FETCH_OPS(fetch_sub)
MBED_FORCEINLINE bool core_util_atomic_exchange_bool(volatile bool *valuePtr, bool desiredValue)
{
    return (bool)core_util_atomic_exchange_u8((volatile uint8_t *)valuePtr, desiredValue);
}

MBED_FORCEINLINE bool core_util_atomic_exchange_explicit_bool(volatile bool *valuePtr, bool desiredValue, mbed_memory_order order)
{
    return (bool)core_util_atomic_exchange_explicit_u8((volatile uint8_t *)valuePtr, desiredValue, order);
}
MBED_FORCEINLINE void *core_util_atomic_exchange_ptr(void *volatile *valuePtr, void *desiredValue)
{
#if MBED_ATOMIC_PTR_SIZE == 32
    return (void *)core_util_atomic_exchange_u32((volatile uint32_t *)valuePtr, (uint32_t)desiredValue);
#else
    return (void *)core_util_atomic_exchange_u64((volatile uint64_t *)valuePtr, (uint64_t)desiredValue);
#endif
}
MBED_FORCEINLINE void *core_util_atomic_exchange_explicit_ptr(void *volatile *valuePtr, void *desiredValue, mbed_memory_order order)
{
#if MBED_ATOMIC_PTR_SIZE == 32
    return (void *)core_util_atomic_exchange_explicit_u32((volatile uint32_t *)valuePtr, (uint32_t)desiredValue, order);
#else
    return (void *)core_util_atomic_exchange_explicit_u64((volatile uint64_t *)valuePtr, (uint64_t)desiredValue, order);
#endif
}
MBED_FORCEINLINE void *core_util_atomic_incr_ptr(void *volatile *valuePtr, ptrdiff_t delta)
{
#if MBED_ATOMIC_PTR_SIZE == 32
    return (void *)core_util_atomic_incr_u32((volatile uint32_t *)valuePtr, (uint32_t)delta);
#else
    return (void *)core_util_atomic_incr_u64((volatile uint64_t *)valuePtr, (uint64_t)delta);
#endif
}

MBED_FORCEINLINE void *core_util_atomic_decr_ptr(void *volatile *valuePtr, ptrdiff_t delta)
{
#if MBED_ATOMIC_PTR_SIZE == 32
    return (void *)core_util_atomic_decr_u32((volatile uint32_t *)valuePtr, (uint32_t)delta);
#else
    return (void *)core_util_atomic_decr_u64((volatile uint64_t *)valuePtr, (uint64_t)delta);
#endif
}

MBED_FORCEINLINE void *core_util_atomic_fetch_add_ptr(void *volatile *valuePtr, ptrdiff_t arg)
{
#if MBED_ATOMIC_PTR_SIZE == 32
    return (void *)core_util_atomic_fetch_add_u32((volatile uint32_t *)valuePtr, (uint32_t)arg);
#else
    return (void *)core_util_atomic_fetch_add_u64((volatile uint64_t *)valuePtr, (uint64_t)arg);
#endif
}
MBED_FORCEINLINE void *core_util_atomic_fetch_add_explicit_ptr(void *volatile *valuePtr, ptrdiff_t arg, mbed_memory_order order)
{
#if MBED_ATOMIC_PTR_SIZE == 32
    return (void *)core_util_atomic_fetch_add_explicit_u32((volatile uint32_t *)valuePtr, (uint32_t)arg, order);
#else
    return (void *)core_util_atomic_fetch_add_explicit_u64((volatile uint64_t *)valuePtr, (uint64_t)arg, order);
#endif
}
MBED_FORCEINLINE void *core_util_atomic_fetch_sub_ptr(void *volatile *valuePtr, ptrdiff_t arg)
{
#if MBED_ATOMIC_PTR_SIZE == 32
    return (void *)core_util_atomic_fetch_sub_u32((volatile uint32_t *)valuePtr, (uint32_t)arg);
#else
    return (void *)core_util_atomic_fetch_sub_u64((volatile uint64_t *)valuePtr, (uint64_t)arg);
#endif
}

MBED_FORCEINLINE void *core_util_atomic_fetch_sub_explicit_ptr(void *volatile *valuePtr, ptrdiff_t arg, mbed_memory_order order)
{
#if MBED_ATOMIC_PTR_SIZE == 32
    return (void *)core_util_atomic_fetch_sub_explicit_u32((volatile uint32_t *)valuePtr, (uint32_t)arg, order);
#else
    return (void *)core_util_atomic_fetch_sub_explicit_u64((volatile uint64_t *)valuePtr, (uint64_t)arg, order);
#endif
}
/* 64-bit accesses are always locked, so the explicit forms just validate the
 * order and call through. */
MBED_FORCEINLINE uint64_t core_util_atomic_load_explicit_u64(const volatile uint64_t *valuePtr, MBED_UNUSED mbed_memory_order order)
{
    MBED_CHECK_LOAD_ORDER(order);
    return core_util_atomic_load_u64(valuePtr);
}

MBED_FORCEINLINE int64_t core_util_atomic_load_explicit_s64(const volatile int64_t *valuePtr, MBED_UNUSED mbed_memory_order order)
{
    MBED_CHECK_LOAD_ORDER(order);
    return core_util_atomic_load_s64(valuePtr);
}

MBED_FORCEINLINE void core_util_atomic_store_explicit_u64(volatile uint64_t *valuePtr, uint64_t desiredValue, MBED_UNUSED mbed_memory_order order)
{
    MBED_CHECK_STORE_ORDER(order);
    core_util_atomic_store_u64(valuePtr, desiredValue);
}

MBED_FORCEINLINE void core_util_atomic_store_explicit_s64(volatile int64_t *valuePtr, int64_t desiredValue, MBED_UNUSED mbed_memory_order order)
{
    MBED_CHECK_STORE_ORDER(order);
    core_util_atomic_store_s64(valuePtr, desiredValue);
}
/* Explicit-ordering wrappers for the locked operations: ordering is
 * irrelevant because the implementation takes a critical section anyway. */
#define DO_MBED_LOCKED_FETCH_OP_ORDERING(name, T, fn_suffix) \
MBED_FORCEINLINE T core_util_atomic_##name##_explicit_##fn_suffix( \
    volatile T *valuePtr, T arg, MBED_UNUSED mbed_memory_order order) \
{ \
    return core_util_atomic_##name##_##fn_suffix(valuePtr, arg); \
}

#define DO_MBED_LOCKED_CAS_ORDERING(name, T, fn_suffix) \
MBED_FORCEINLINE bool core_util_atomic_##name##_explicit_##fn_suffix( \
    volatile T *ptr, T *expectedCurrentValue, T desiredValue, \
    MBED_UNUSED mbed_memory_order success, \
    MBED_UNUSED mbed_memory_order failure) \
{ \
    MBED_CHECK_CAS_ORDER(success, failure); \
    return core_util_atomic_##name##_##fn_suffix(ptr, expectedCurrentValue, desiredValue); \
}
DO_MBED_LOCKED_FETCH_OP_ORDERINGS(exchange)
DO_MBED_LOCKED_FETCH_OP_ORDERINGS(fetch_add)
DO_MBED_LOCKED_FETCH_OP_ORDERINGS(fetch_sub)
DO_MBED_LOCKED_FETCH_OP_ORDERINGS(fetch_and)
DO_MBED_LOCKED_FETCH_OP_ORDERINGS(fetch_or)
DO_MBED_LOCKED_FETCH_OP_ORDERINGS(fetch_xor)
DO_MBED_LOCKED_CAS_ORDERINGS(cas)
DO_MBED_LOCKED_CAS_ORDERINGS(compare_exchange_weak)
#ifdef __cplusplus
} // extern "C"

/* Templates mapping the C++ overloaded names onto the typed C functions */

#define DO_MBED_ATOMIC_LOAD_TEMPLATE(T, fn_suffix) \
template<> \
inline T core_util_atomic_load(const volatile T *valuePtr) noexcept \
{ \
    return core_util_atomic_load_##fn_suffix(valuePtr); \
} \
 \
template<> \
inline T core_util_atomic_load(const T *valuePtr) noexcept \
{ \
    return core_util_atomic_load_##fn_suffix(valuePtr); \
} \
 \
template<> \
inline T core_util_atomic_load_explicit(const volatile T *valuePtr, mbed_memory_order order) noexcept \
{ \
    return core_util_atomic_load_explicit_##fn_suffix(valuePtr, order); \
} \
 \
template<> \
inline T core_util_atomic_load_explicit(const T *valuePtr, mbed_memory_order order) noexcept \
{ \
    return core_util_atomic_load_explicit_##fn_suffix(valuePtr, order); \
}
template<typename T>
inline T *core_util_atomic_load(T *const volatile *valuePtr) noexcept
{
    return (T *) core_util_atomic_load_ptr((void *const volatile *) valuePtr);
}

template<typename T>
inline T *core_util_atomic_load(T *const *valuePtr) noexcept
{
    return (T *) core_util_atomic_load_ptr((void *const *) valuePtr);
}

template<typename T>
inline T *core_util_atomic_load_explicit(T *const volatile *valuePtr, mbed_memory_order order) noexcept
{
    return (T *) core_util_atomic_load_explicit_ptr((void *const volatile *) valuePtr, order);
}

template<typename T>
inline T *core_util_atomic_load_explicit(T *const *valuePtr, mbed_memory_order order) noexcept
{
    return (T *) core_util_atomic_load_explicit_ptr((void *const *) valuePtr, order);
}
DO_MBED_ATOMIC_LOAD_TEMPLATE(uint8_t, u8)
DO_MBED_ATOMIC_LOAD_TEMPLATE(uint16_t, u16)
DO_MBED_ATOMIC_LOAD_TEMPLATE(uint32_t, u32)
DO_MBED_ATOMIC_LOAD_TEMPLATE(uint64_t, u64)
DO_MBED_ATOMIC_LOAD_TEMPLATE(int8_t, s8)
DO_MBED_ATOMIC_LOAD_TEMPLATE(int16_t, s16)
DO_MBED_ATOMIC_LOAD_TEMPLATE(int32_t, s32)
DO_MBED_ATOMIC_LOAD_TEMPLATE(int64_t, s64)
DO_MBED_ATOMIC_LOAD_TEMPLATE(bool, bool)
#define DO_MBED_ATOMIC_STORE_TEMPLATE(T, fn_suffix) \
template<> \
inline void core_util_atomic_store(volatile T *valuePtr, T val) noexcept \
{ \
    core_util_atomic_store_##fn_suffix(valuePtr, val); \
} \
 \
template<> \
inline void core_util_atomic_store(T *valuePtr, T val) noexcept \
{ \
    core_util_atomic_store_##fn_suffix(valuePtr, val); \
} \
 \
template<> \
inline void core_util_atomic_store_explicit(volatile T *valuePtr, T val, mbed_memory_order order) noexcept \
{ \
    core_util_atomic_store_explicit_##fn_suffix(valuePtr, val, order); \
} \
 \
template<> \
inline void core_util_atomic_store_explicit(T *valuePtr, T val, mbed_memory_order order) noexcept \
{ \
    core_util_atomic_store_explicit_##fn_suffix(valuePtr, val, order); \
}
template<typename T>
inline void core_util_atomic_store(T *volatile *valuePtr, T *val) noexcept
{
    core_util_atomic_store_ptr((void *volatile *) valuePtr, val);
}

template<typename T>
inline void core_util_atomic_store(T **valuePtr, T *val) noexcept
{
    core_util_atomic_store_ptr((void **) valuePtr, val);
}

template<typename T>
inline void core_util_atomic_store_explicit(T *volatile *valuePtr, T *val, mbed_memory_order order) noexcept
{
    core_util_atomic_store_explicit_ptr((void *volatile *) valuePtr, val, order);
}

template<typename T>
inline void core_util_atomic_store_explicit(T **valuePtr, T *val, mbed_memory_order order) noexcept
{
    core_util_atomic_store_explicit_ptr((void **) valuePtr, val, order);
}
DO_MBED_ATOMIC_STORE_TEMPLATE(uint8_t, u8)
DO_MBED_ATOMIC_STORE_TEMPLATE(uint16_t, u16)
DO_MBED_ATOMIC_STORE_TEMPLATE(uint32_t, u32)
DO_MBED_ATOMIC_STORE_TEMPLATE(uint64_t, u64)
DO_MBED_ATOMIC_STORE_TEMPLATE(int8_t, s8)
DO_MBED_ATOMIC_STORE_TEMPLATE(int16_t, s16)
DO_MBED_ATOMIC_STORE_TEMPLATE(int32_t, s32)
DO_MBED_ATOMIC_STORE_TEMPLATE(int64_t, s64)
DO_MBED_ATOMIC_STORE_TEMPLATE(bool, bool)
#define DO_MBED_ATOMIC_CAS_TEMPLATE(tname, fname, T, fn_suffix) \
template<> inline \
bool core_util_atomic_##tname(volatile T *ptr, T *expectedCurrentValue, T desiredValue) noexcept \
{ \
    return core_util_atomic_##fname##_##fn_suffix(ptr, expectedCurrentValue, desiredValue); \
}

template<typename T>
inline bool core_util_atomic_compare_exchange_strong(T *volatile *ptr, T **expectedCurrentValue, T *desiredValue) noexcept
{
    return core_util_atomic_cas_ptr((void *volatile *) ptr, (void **) expectedCurrentValue, desiredValue);
}

template<typename T>
inline bool core_util_atomic_compare_exchange_weak(T *volatile *ptr, T **expectedCurrentValue, T *desiredValue) noexcept
{
    return core_util_atomic_compare_exchange_weak_ptr((void *volatile *) ptr, (void **) expectedCurrentValue, desiredValue);
}
#define DO_MBED_ATOMIC_CAS_TEMPLATES(tname, fname) \
    DO_MBED_ATOMIC_CAS_TEMPLATE(tname, fname, uint8_t, u8) \
    DO_MBED_ATOMIC_CAS_TEMPLATE(tname, fname, uint16_t, u16) \
    DO_MBED_ATOMIC_CAS_TEMPLATE(tname, fname, uint32_t, u32) \
    DO_MBED_ATOMIC_CAS_TEMPLATE(tname, fname, uint64_t, u64) \
    DO_MBED_ATOMIC_CAS_TEMPLATE(tname, fname, int8_t, s8) \
    DO_MBED_ATOMIC_CAS_TEMPLATE(tname, fname, int16_t, s16) \
    DO_MBED_ATOMIC_CAS_TEMPLATE(tname, fname, int32_t, s32) \
    DO_MBED_ATOMIC_CAS_TEMPLATE(tname, fname, int64_t, s64) \
    DO_MBED_ATOMIC_CAS_TEMPLATE(tname, fname, bool, bool)
DO_MBED_ATOMIC_CAS_TEMPLATES(compare_exchange_strong, cas)
DO_MBED_ATOMIC_CAS_TEMPLATES(compare_exchange_weak, compare_exchange_weak)
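/* With these templates, C++ callers can use overload resolution instead of
 * the type-suffixed C names (sketch, with `count` an assumed volatile
 * uint32_t):
 *
 *     uint32_t expected = core_util_atomic_load(&count);
 *     while (!core_util_atomic_compare_exchange_weak(&count, &expected, expected + 1)) {
 *         // retry - `expected` was refreshed by the failed attempt
 *     }
 */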
#define DO_MBED_ATOMIC_OP_TEMPLATE(name, T, fn_suffix) \
template<> \
inline T core_util_atomic_##name(volatile T *valuePtr, T arg) noexcept \
{ \
    return core_util_atomic_##name##_##fn_suffix(valuePtr, arg); \
} \
 \
template<> \
inline T core_util_atomic_##name##_explicit(volatile T *valuePtr, T arg, \
                                            mbed_memory_order order) noexcept \
{ \
    return core_util_atomic_##name##_explicit_##fn_suffix(valuePtr, arg, order); \
}

template<>
inline bool core_util_atomic_exchange(volatile bool *valuePtr, bool arg) noexcept
{
    return core_util_atomic_exchange_bool(valuePtr, arg);
}

template<>
inline bool core_util_atomic_exchange_explicit(volatile bool *valuePtr, bool arg, mbed_memory_order order) noexcept
{
    return core_util_atomic_exchange_explicit_bool(valuePtr, arg, order);
}

template<typename T>
inline T *core_util_atomic_exchange(T *volatile *valuePtr, T *arg) noexcept
{
    return (T *) core_util_atomic_exchange_ptr((void *volatile *) valuePtr, arg);
}

template<typename T>
inline T *core_util_atomic_exchange_explicit(T *volatile *valuePtr, T *arg, mbed_memory_order order) noexcept
{
    return (T *) core_util_atomic_exchange_explicit_ptr((void *volatile *) valuePtr, arg, order);
}
#define DO_MBED_ATOMIC_OP_U_TEMPLATES(name) \
    DO_MBED_ATOMIC_OP_TEMPLATE(name, uint8_t, u8) \
    DO_MBED_ATOMIC_OP_TEMPLATE(name, uint16_t, u16) \
    DO_MBED_ATOMIC_OP_TEMPLATE(name, uint32_t, u32) \
    DO_MBED_ATOMIC_OP_TEMPLATE(name, uint64_t, u64)

#define DO_MBED_ATOMIC_OP_S_TEMPLATES(name) \
    DO_MBED_ATOMIC_OP_TEMPLATE(name, int8_t, s8) \
    DO_MBED_ATOMIC_OP_TEMPLATE(name, int16_t, s16) \
    DO_MBED_ATOMIC_OP_TEMPLATE(name, int32_t, s32) \
    DO_MBED_ATOMIC_OP_TEMPLATE(name, int64_t, s64)
#define DO_MBED_ATOMIC_MANUAL_PRE_OP_TEMPLATE(name, T, fn_suffix, postname, OP) \
template<> \
inline T core_util_atomic_##name(volatile T *valuePtr, T arg) noexcept \
{ \
    return core_util_atomic_##postname##_##fn_suffix(valuePtr, arg) OP; \
} \
 \
template<> \
inline T core_util_atomic_##name##_explicit(volatile T *valuePtr, T arg, \
                                            mbed_memory_order order) noexcept \
{ \
    return core_util_atomic_##postname##_explicit_##fn_suffix(valuePtr, arg, order) OP; \
}
DO_MBED_ATOMIC_OP_U_TEMPLATES(exchange)
DO_MBED_ATOMIC_OP_S_TEMPLATES(exchange)
DO_MBED_ATOMIC_OP_U_TEMPLATES(fetch_add)
DO_MBED_ATOMIC_OP_S_TEMPLATES(fetch_add)
DO_MBED_ATOMIC_OP_U_TEMPLATES(fetch_sub)
DO_MBED_ATOMIC_OP_S_TEMPLATES(fetch_sub)
DO_MBED_ATOMIC_OP_U_TEMPLATES(fetch_and)
DO_MBED_ATOMIC_OP_U_TEMPLATES(fetch_or)
DO_MBED_ATOMIC_OP_U_TEMPLATES(fetch_xor)
/* The "operate and return new value" names (incr, decr, and_fetch, or_fetch,
 * xor_fetch) only exist as native C calls where the lock-free code generated
 * them; elsewhere derive them from the fetch_xxx form. */
#if MBED_EXCLUSIVE_ACCESS
#define DO_MBED_ATOMIC_PRE_OP_TEMPLATES(name, postname, OP) \
    template<typename T> T core_util_atomic_##name(volatile T *valuePtr, T arg) noexcept; \
    template<typename T> T core_util_atomic_##name##_explicit(volatile T *valuePtr, T arg, mbed_memory_order order) noexcept; \
    DO_MBED_ATOMIC_OP_TEMPLATE(name, uint8_t, u8) \
    DO_MBED_ATOMIC_OP_TEMPLATE(name, uint16_t, u16) \
    DO_MBED_ATOMIC_OP_TEMPLATE(name, uint32_t, u32) \
    DO_MBED_ATOMIC_MANUAL_PRE_OP_TEMPLATE(name, uint64_t, u64, postname, OP)
#else
#define DO_MBED_ATOMIC_PRE_OP_TEMPLATES(name, postname, OP) \
    template<typename T> T core_util_atomic_##name(volatile T *valuePtr, T arg) noexcept; \
    template<typename T> T core_util_atomic_##name##_explicit(volatile T *valuePtr, T arg, mbed_memory_order order) noexcept; \
    DO_MBED_ATOMIC_MANUAL_PRE_OP_TEMPLATE(name, uint8_t, u8, postname, OP) \
    DO_MBED_ATOMIC_MANUAL_PRE_OP_TEMPLATE(name, uint16_t, u16, postname, OP) \
    DO_MBED_ATOMIC_MANUAL_PRE_OP_TEMPLATE(name, uint32_t, u32, postname, OP) \
    DO_MBED_ATOMIC_MANUAL_PRE_OP_TEMPLATE(name, uint64_t, u64, postname, OP)
#endif
DO_MBED_ATOMIC_PRE_OP_TEMPLATES(incr, fetch_add, + arg)
DO_MBED_ATOMIC_PRE_OP_TEMPLATES(decr, fetch_sub, - arg)
DO_MBED_ATOMIC_PRE_OP_TEMPLATES(and_fetch, fetch_and, & arg)
DO_MBED_ATOMIC_PRE_OP_TEMPLATES(or_fetch, fetch_or, | arg)
DO_MBED_ATOMIC_PRE_OP_TEMPLATES(xor_fetch, fetch_xor, ^ arg)
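/* So where no native "new value" form exists - e.g. uint64_t when exclusive
 * access is available - the template reduces to the fetch form plus the
 * operation: with `v` an assumed volatile uint64_t, core_util_atomic_incr(&v, 1)
 * expands to core_util_atomic_fetch_add_u64(&v, 1) + 1. */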
#endif // __cplusplus

/* Tidy up the implementation-local macros */
#undef DO_MBED_LOCKFREE_EXCHG_ASM
#undef DO_MBED_LOCKFREE_NEWVAL_2OP_ASM
#undef DO_MBED_LOCKFREE_OLDVAL_3OP_ASM
#undef DO_MBED_LOCKFREE_OLDVAL_2OP_ASM
#undef DO_MBED_LOCKFREE_CAS_WEAK_ASM
#undef DO_MBED_LOCKFREE_CAS_STRONG_ASM
#undef DO_MBED_LOCKFREE_LOADSTORE
#undef DO_MBED_LOCKFREE_EXCHG_OP
#undef DO_MBED_LOCKFREE_CAS_WEAK_OP
#undef DO_MBED_LOCKFREE_CAS_STRONG_OP
#undef DO_MBED_LOCKFREE_NEWVAL_2OP
#undef DO_MBED_LOCKFREE_OLDVAL_2OP
#undef DO_MBED_LOCKFREE_OLDVAL_3OP
#undef DO_MBED_LOCKFREE_EXCHG_OPS
#undef DO_MBED_LOCKFREE_NEWVAL_2OPS
#undef DO_MBED_LOCKFREE_OLDVAL_2OPS
#undef DO_MBED_LOCKFREE_OLDVAL_3OPS
#undef DO_MBED_LOCKFREE_CAS_WEAK_OPS
#undef DO_MBED_LOCKFREE_CAS_STRONG_OPS
#undef DO_MBED_SIGNED_CAS_OP
#undef DO_MBED_SIGNED_CAS_OPS
#undef DO_MBED_SIGNED_FETCH_OP
#undef DO_MBED_SIGNED_EXPLICIT_FETCH_OP
#undef DO_MBED_SIGNED_FETCH_OPS
#undef DO_MBED_SIGNED_EXPLICIT_FETCH_OPS
#undef DO_MBED_LOCKED_FETCH_OP_ORDERINGS
#undef DO_MBED_LOCKED_CAS_ORDERINGS
#undef MBED_ACQUIRE_BARRIER
#undef MBED_RELEASE_BARRIER
#undef MBED_SEQ_CST_BARRIER
#undef DO_MBED_ATOMIC_LOAD_TEMPLATE
#undef DO_MBED_ATOMIC_STORE_TEMPLATE
#undef DO_MBED_ATOMIC_EXCHANGE_TEMPLATE
#undef DO_MBED_ATOMIC_CAS_TEMPLATE
#undef DO_MBED_ATOMIC_CAS_TEMPLATES
#undef DO_MBED_ATOMIC_FETCH_TEMPLATE
#undef DO_MBED_ATOMIC_FETCH_U_TEMPLATES
#undef DO_MBED_ATOMIC_FETCH_S_TEMPLATES
#endif /* __MBED_ATOMIC_IMPL_H__ */