Mbed OS Reference
Loading...
Searching...
No Matches
atomic functions

Atomic functions function analogously to C11 and C++11 - loads have acquire semantics, stores have release semantics, and atomic operations are sequentially consistent. More...

Data Structures

struct  core_util_atomic_flag
 A lock-free, primitive atomic flag. More...
 

Macros

#define CORE_UTIL_ATOMIC_FLAG_INIT   { 0 }
 Initializer for a core_util_atomic_flag. More...
 

Typedefs

typedef enum mbed_memory_order mbed_memory_order
 Memory order constraints for atomic operations. More...
 
typedef struct core_util_atomic_flag core_util_atomic_flag
 A lock-free, primitive atomic flag. More...
 

Enumerations

enum  mbed_memory_order
 Memory order constraints for atomic operations. More...
 

Functions

bool core_util_atomic_flag_test_and_set (volatile core_util_atomic_flag *flagPtr)
 Atomic test and set. More...
 
MBED_FORCEINLINE bool core_util_atomic_flag_test_and_set_explicit (volatile core_util_atomic_flag *flagPtr, mbed_memory_order order)
 Atomic test and set. More...
 
MBED_FORCEINLINE void core_util_atomic_flag_clear (volatile core_util_atomic_flag *flagPtr)
 Atomic clear. More...
 
MBED_FORCEINLINE void core_util_atomic_flag_clear_explicit (volatile core_util_atomic_flag *flagPtr, mbed_memory_order order)
 Atomic clear. More...
 
bool core_util_atomic_cas_u8 (volatile uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue)
 Atomic compare and set. More...
 
MBED_FORCEINLINE bool core_util_atomic_cas_explicit_u8 (volatile uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue, mbed_memory_order success, mbed_memory_order failure)
 Atomic compare and set. More...
 
bool core_util_atomic_cas_u16 (volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue)
 Atomic compare and set. More...
 
MBED_FORCEINLINE bool core_util_atomic_cas_explicit_u16 (volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue, mbed_memory_order success, mbed_memory_order failure)
 
bool core_util_atomic_cas_u32 (volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue)
 Atomic compare and set. More...
 
MBED_FORCEINLINE bool core_util_atomic_cas_explicit_u32 (volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue, mbed_memory_order success, mbed_memory_order failure)
 
bool core_util_atomic_cas_u64 (volatile uint64_t *ptr, uint64_t *expectedCurrentValue, uint64_t desiredValue)
 Atomic compare and set. More...
 
MBED_FORCEINLINE bool core_util_atomic_cas_explicit_u64 (volatile uint64_t *ptr, uint64_t *expectedCurrentValue, uint64_t desiredValue, mbed_memory_order success, mbed_memory_order failure)
 
MBED_FORCEINLINE bool core_util_atomic_cas_s8 (volatile int8_t *ptr, int8_t *expectedCurrentValue, int8_t desiredValue)
 Atomic compare and set. More...
 
MBED_FORCEINLINE bool core_util_atomic_cas_explicit_s8 (volatile int8_t *ptr, int8_t *expectedCurrentValue, int8_t desiredValue, mbed_memory_order success, mbed_memory_order failure)
 
MBED_FORCEINLINE bool core_util_atomic_cas_s16 (volatile int16_t *ptr, int16_t *expectedCurrentValue, int16_t desiredValue)
 Atomic compare and set. More...
 
MBED_FORCEINLINE bool core_util_atomic_cas_explicit_s16 (volatile int16_t *ptr, int16_t *expectedCurrentValue, int16_t desiredValue, mbed_memory_order success, mbed_memory_order failure)
 
MBED_FORCEINLINE bool core_util_atomic_cas_s32 (volatile int32_t *ptr, int32_t *expectedCurrentValue, int32_t desiredValue)
 Atomic compare and set. More...
 
MBED_FORCEINLINE bool core_util_atomic_cas_explicit_s32 (volatile int32_t *ptr, int32_t *expectedCurrentValue, int32_t desiredValue, mbed_memory_order success, mbed_memory_order failure)
 
MBED_FORCEINLINE bool core_util_atomic_cas_s64 (volatile int64_t *ptr, int64_t *expectedCurrentValue, int64_t desiredValue)
 Atomic compare and set. More...
 
MBED_FORCEINLINE bool core_util_atomic_cas_explicit_s64 (volatile int64_t *ptr, int64_t *expectedCurrentValue, int64_t desiredValue, mbed_memory_order success, mbed_memory_order failure)
 
MBED_FORCEINLINE bool core_util_atomic_cas_bool (volatile bool *ptr, bool *expectedCurrentValue, bool desiredValue)
 Atomic compare and set. More...
 
MBED_FORCEINLINE bool core_util_atomic_cas_explicit_bool (volatile bool *ptr, bool *expectedCurrentValue, bool desiredValue, mbed_memory_order success, mbed_memory_order failure)
 
bool core_util_atomic_cas_ptr (void *volatile *ptr, void **expectedCurrentValue, void *desiredValue)
 Atomic compare and set. More...
 
MBED_FORCEINLINE bool core_util_atomic_cas_explicit_ptr (void *volatile *ptr, void **expectedCurrentValue, void *desiredValue, mbed_memory_order success, mbed_memory_order failure)
 
bool core_util_atomic_compare_exchange_weak_u8 (volatile uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue)
 Atomic compare and set. More...
 
MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_u8 (volatile uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue, mbed_memory_order success, mbed_memory_order failure)
 Atomic compare and set. More...
 
bool core_util_atomic_compare_exchange_weak_u16 (volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue)
 Atomic compare and set. More...
 
MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_u16 (volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue, mbed_memory_order success, mbed_memory_order failure)
 
bool core_util_atomic_compare_exchange_weak_u32 (volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue)
 Atomic compare and set. More...
 
MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_u32 (volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue, mbed_memory_order success, mbed_memory_order failure)
 
bool core_util_atomic_compare_exchange_weak_u64 (volatile uint64_t *ptr, uint64_t *expectedCurrentValue, uint64_t desiredValue)
 Atomic compare and set. More...
 
MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_u64 (volatile uint64_t *ptr, uint64_t *expectedCurrentValue, uint64_t desiredValue, mbed_memory_order success, mbed_memory_order failure)
 
MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_s8 (volatile int8_t *ptr, int8_t *expectedCurrentValue, int8_t desiredValue)
 Atomic compare and set. More...
 
MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_s8 (volatile int8_t *ptr, int8_t *expectedCurrentValue, int8_t desiredValue, mbed_memory_order success, mbed_memory_order failure)
 
MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_s16 (volatile int16_t *ptr, int16_t *expectedCurrentValue, int16_t desiredValue)
 Atomic compare and set. More...
 
MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_s16 (volatile int16_t *ptr, int16_t *expectedCurrentValue, int16_t desiredValue, mbed_memory_order success, mbed_memory_order failure)
 
MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_s32 (volatile int32_t *ptr, int32_t *expectedCurrentValue, int32_t desiredValue)
 Atomic compare and set. More...
 
MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_s32 (volatile int32_t *ptr, int32_t *expectedCurrentValue, int32_t desiredValue, mbed_memory_order success, mbed_memory_order failure)
 
MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_s64 (volatile int64_t *ptr, int64_t *expectedCurrentValue, int64_t desiredValue)
 Atomic compare and set. More...
 
MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_s64 (volatile int64_t *ptr, int64_t *expectedCurrentValue, int64_t desiredValue, mbed_memory_order success, mbed_memory_order failure)
 
MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_bool (volatile bool *ptr, bool *expectedCurrentValue, bool desiredValue)
 Atomic compare and set. More...
 
MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_bool (volatile bool *ptr, bool *expectedCurrentValue, bool desiredValue, mbed_memory_order success, mbed_memory_order failure)
 
MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_ptr (void *volatile *ptr, void **expectedCurrentValue, void *desiredValue)
 Atomic compare and set. More...
 
MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_ptr (void *volatile *ptr, void **expectedCurrentValue, void *desiredValue, mbed_memory_order success, mbed_memory_order failure)
 
MBED_FORCEINLINE uint8_t core_util_atomic_load_u8 (const volatile uint8_t *valuePtr)
 Atomic load. More...
 
MBED_FORCEINLINE uint8_t core_util_atomic_load_explicit_u8 (const volatile uint8_t *valuePtr, mbed_memory_order order)
 Atomic load with explicit ordering. More...
 
MBED_FORCEINLINE uint16_t core_util_atomic_load_u16 (const volatile uint16_t *valuePtr)
 
MBED_FORCEINLINE uint16_t core_util_atomic_load_explicit_u16 (const volatile uint16_t *valuePtr, mbed_memory_order order)
 
MBED_FORCEINLINE uint32_t core_util_atomic_load_u32 (const volatile uint32_t *valuePtr)
 
MBED_FORCEINLINE uint32_t core_util_atomic_load_explicit_u32 (const volatile uint32_t *valuePtr, mbed_memory_order order)
 
uint64_t core_util_atomic_load_u64 (const volatile uint64_t *valuePtr)
 
MBED_FORCEINLINE uint64_t core_util_atomic_load_explicit_u64 (const volatile uint64_t *valuePtr, mbed_memory_order order)
 
MBED_FORCEINLINE int8_t core_util_atomic_load_s8 (const volatile int8_t *valuePtr)
 
MBED_FORCEINLINE int8_t core_util_atomic_load_explicit_s8 (const volatile int8_t *valuePtr, mbed_memory_order order)
 
MBED_FORCEINLINE int16_t core_util_atomic_load_s16 (const volatile int16_t *valuePtr)
 
MBED_FORCEINLINE int16_t core_util_atomic_load_explicit_s16 (const volatile int16_t *valuePtr, mbed_memory_order order)
 
MBED_FORCEINLINE int32_t core_util_atomic_load_s32 (const volatile int32_t *valuePtr)
 
MBED_FORCEINLINE int32_t core_util_atomic_load_explicit_s32 (const volatile int32_t *valuePtr, mbed_memory_order order)
 
MBED_FORCEINLINE int64_t core_util_atomic_load_s64 (const volatile int64_t *valuePtr)
 
MBED_FORCEINLINE int64_t core_util_atomic_load_explicit_s64 (const volatile int64_t *valuePtr, MBED_UNUSED mbed_memory_order order)
 Atomic load with explicit ordering. More...
 
MBED_FORCEINLINE bool core_util_atomic_load_bool (const volatile bool *valuePtr)
 
MBED_FORCEINLINE bool core_util_atomic_load_explicit_bool (const volatile bool *valuePtr, mbed_memory_order order)
 
MBED_FORCEINLINE void * core_util_atomic_load_ptr (void *const volatile *valuePtr)
 
MBED_FORCEINLINE void * core_util_atomic_load_explicit_ptr (void *const volatile *valuePtr, mbed_memory_order order)
 
MBED_FORCEINLINE void core_util_atomic_store_u8 (volatile uint8_t *valuePtr, uint8_t desiredValue)
 Atomic store. More...
 
MBED_FORCEINLINE void core_util_atomic_store_explicit_u8 (volatile uint8_t *valuePtr, uint8_t desiredValue, mbed_memory_order order)
 Atomic store with explicit ordering. More...
 
MBED_FORCEINLINE void core_util_atomic_store_u16 (volatile uint16_t *valuePtr, uint16_t desiredValue)
 
MBED_FORCEINLINE void core_util_atomic_store_explicit_u16 (volatile uint16_t *valuePtr, uint16_t desiredValue, mbed_memory_order order)
 
MBED_FORCEINLINE void core_util_atomic_store_u32 (volatile uint32_t *valuePtr, uint32_t desiredValue)
 
MBED_FORCEINLINE void core_util_atomic_store_explicit_u32 (volatile uint32_t *valuePtr, uint32_t desiredValue, mbed_memory_order order)
 
void core_util_atomic_store_u64 (volatile uint64_t *valuePtr, uint64_t desiredValue)
 
MBED_FORCEINLINE void core_util_atomic_store_explicit_u64 (volatile uint64_t *valuePtr, uint64_t desiredValue, mbed_memory_order order)
 
MBED_FORCEINLINE void core_util_atomic_store_s8 (volatile int8_t *valuePtr, int8_t desiredValue)
 
MBED_FORCEINLINE void core_util_atomic_store_explicit_s8 (volatile int8_t *valuePtr, int8_t desiredValue, mbed_memory_order order)
 
MBED_FORCEINLINE void core_util_atomic_store_s16 (volatile int16_t *valuePtr, int16_t desiredValue)
 
MBED_FORCEINLINE void core_util_atomic_store_explicit_s16 (volatile int16_t *valuePtr, int16_t desiredValue, mbed_memory_order order)
 
MBED_FORCEINLINE void core_util_atomic_store_s32 (volatile int32_t *valuePtr, int32_t desiredValue)
 
MBED_FORCEINLINE void core_util_atomic_store_explicit_s32 (volatile int32_t *valuePtr, int32_t desiredValue, mbed_memory_order order)
 
MBED_FORCEINLINE void core_util_atomic_store_s64 (volatile int64_t *valuePtr, int64_t desiredValue)
 
MBED_FORCEINLINE void core_util_atomic_store_explicit_s64 (volatile int64_t *valuePtr, int64_t desiredValue, mbed_memory_order order)
 
MBED_FORCEINLINE void core_util_atomic_store_bool (volatile bool *valuePtr, bool desiredValue)
 
MBED_FORCEINLINE void core_util_atomic_store_explicit_bool (volatile bool *valuePtr, bool desiredValue, mbed_memory_order order)
 
MBED_FORCEINLINE void core_util_atomic_store_ptr (void *volatile *valuePtr, void *desiredValue)
 
MBED_FORCEINLINE void core_util_atomic_store_explicit_ptr (void *volatile *valuePtr, void *desiredValue, mbed_memory_order order)
 
uint8_t core_util_atomic_exchange_u8 (volatile uint8_t *valuePtr, uint8_t desiredValue)
 Atomic exchange. More...
 
MBED_FORCEINLINE uint8_t core_util_atomic_exchange_explicit_u8 (volatile uint8_t *valuePtr, uint8_t desiredValue, mbed_memory_order order)
 Atomic exchange. More...
 
uint16_t core_util_atomic_exchange_u16 (volatile uint16_t *valuePtr, uint16_t desiredValue)
 Atomic exchange. More...
 
MBED_FORCEINLINE uint16_t core_util_atomic_exchange_explicit_u16 (volatile uint16_t *valuePtr, uint16_t desiredValue, mbed_memory_order order)
 
uint32_t core_util_atomic_exchange_u32 (volatile uint32_t *valuePtr, uint32_t desiredValue)
 Atomic exchange. More...
 
MBED_FORCEINLINE uint32_t core_util_atomic_exchange_explicit_u32 (volatile uint32_t *valuePtr, uint32_t desiredValue, mbed_memory_order order)
 
uint64_t core_util_atomic_exchange_u64 (volatile uint64_t *valuePtr, uint64_t desiredValue)
 Atomic exchange. More...
 
MBED_FORCEINLINE uint64_t core_util_atomic_exchange_explicit_u64 (volatile uint64_t *valuePtr, uint64_t desiredValue, mbed_memory_order order)
 
MBED_FORCEINLINE int8_t core_util_atomic_exchange_s8 (volatile int8_t *valuePtr, int8_t desiredValue)
 Atomic exchange. More...
 
MBED_FORCEINLINE int8_t core_util_atomic_exchange_explicit_s8 (volatile int8_t *valuePtr, int8_t desiredValue, mbed_memory_order order)
 
MBED_FORCEINLINE int16_t core_util_atomic_exchange_s16 (volatile int16_t *valuePtr, int16_t desiredValue)
 Atomic exchange. More...
 
MBED_FORCEINLINE int16_t core_util_atomic_exchange_explicit_s16 (volatile int16_t *valuePtr, int16_t desiredValue, mbed_memory_order order)
 
MBED_FORCEINLINE int32_t core_util_atomic_exchange_s32 (volatile int32_t *valuePtr, int32_t desiredValue)
 Atomic exchange. More...
 
MBED_FORCEINLINE int32_t core_util_atomic_exchange_explicit_s32 (volatile int32_t *valuePtr, int32_t desiredValue, mbed_memory_order order)
 
MBED_FORCEINLINE int64_t core_util_atomic_exchange_s64 (volatile int64_t *valuePtr, int64_t desiredValue)
 Atomic exchange. More...
 
MBED_FORCEINLINE int64_t core_util_atomic_exchange_explicit_s64 (volatile int64_t *valuePtr, int64_t desiredValue, mbed_memory_order order)
 
MBED_FORCEINLINE bool core_util_atomic_exchange_bool (volatile bool *valuePtr, bool desiredValue)
 Atomic exchange. More...
 
MBED_FORCEINLINE bool core_util_atomic_exchange_explicit_bool (volatile bool *valuePtr, bool desiredValue, mbed_memory_order order)
 
void * core_util_atomic_exchange_ptr (void *volatile *valuePtr, void *desiredValue)
 Atomic exchange. More...
 
MBED_FORCEINLINE void * core_util_atomic_exchange_explicit_ptr (void *volatile *valuePtr, void *desiredValue, mbed_memory_order order)
 
uint8_t core_util_atomic_incr_u8 (volatile uint8_t *valuePtr, uint8_t delta)
 Atomic increment. More...
 
uint16_t core_util_atomic_incr_u16 (volatile uint16_t *valuePtr, uint16_t delta)
 Atomic increment. More...
 
uint32_t core_util_atomic_incr_u32 (volatile uint32_t *valuePtr, uint32_t delta)
 Atomic increment. More...
 
uint64_t core_util_atomic_incr_u64 (volatile uint64_t *valuePtr, uint64_t delta)
 Atomic increment. More...
 
MBED_FORCEINLINE int8_t core_util_atomic_incr_s8 (volatile int8_t *valuePtr, int8_t delta)
 Atomic increment. More...
 
MBED_FORCEINLINE int16_t core_util_atomic_incr_s16 (volatile int16_t *valuePtr, int16_t delta)
 Atomic increment. More...
 
MBED_FORCEINLINE int32_t core_util_atomic_incr_s32 (volatile int32_t *valuePtr, int32_t delta)
 Atomic increment. More...
 
MBED_FORCEINLINE int64_t core_util_atomic_incr_s64 (volatile int64_t *valuePtr, int64_t delta)
 Atomic increment. More...
 
void * core_util_atomic_incr_ptr (void *volatile *valuePtr, ptrdiff_t delta)
 Atomic increment. More...
 
uint8_t core_util_atomic_decr_u8 (volatile uint8_t *valuePtr, uint8_t delta)
 Atomic decrement. More...
 
uint16_t core_util_atomic_decr_u16 (volatile uint16_t *valuePtr, uint16_t delta)
 Atomic decrement. More...
 
uint32_t core_util_atomic_decr_u32 (volatile uint32_t *valuePtr, uint32_t delta)
 Atomic decrement. More...
 
uint64_t core_util_atomic_decr_u64 (volatile uint64_t *valuePtr, uint64_t delta)
 Atomic decrement. More...
 
MBED_FORCEINLINE int8_t core_util_atomic_decr_s8 (volatile int8_t *valuePtr, int8_t delta)
 Atomic decrement. More...
 
MBED_FORCEINLINE int16_t core_util_atomic_decr_s16 (volatile int16_t *valuePtr, int16_t delta)
 Atomic decrement. More...
 
MBED_FORCEINLINE int32_t core_util_atomic_decr_s32 (volatile int32_t *valuePtr, int32_t delta)
 Atomic decrement. More...
 
MBED_FORCEINLINE int64_t core_util_atomic_decr_s64 (volatile int64_t *valuePtr, int64_t delta)
 Atomic decrement. More...
 
void * core_util_atomic_decr_ptr (void *volatile *valuePtr, ptrdiff_t delta)
 Atomic decrement. More...
 
uint8_t core_util_atomic_fetch_add_u8 (volatile uint8_t *valuePtr, uint8_t arg)
 Atomic add. More...
 
MBED_FORCEINLINE uint8_t core_util_atomic_fetch_add_explicit_u8 (volatile uint8_t *valuePtr, uint8_t arg, mbed_memory_order order)
 Atomic add. More...
 
uint16_t core_util_atomic_fetch_add_u16 (volatile uint16_t *valuePtr, uint16_t arg)
 Atomic add. More...
 
MBED_FORCEINLINE uint16_t core_util_atomic_fetch_add_explicit_u16 (volatile uint16_t *valuePtr, uint16_t arg, mbed_memory_order order)
 
uint32_t core_util_atomic_fetch_add_u32 (volatile uint32_t *valuePtr, uint32_t arg)
 Atomic add. More...
 
MBED_FORCEINLINE uint32_t core_util_atomic_fetch_add_explicit_u32 (volatile uint32_t *valuePtr, uint32_t arg, mbed_memory_order order)
 
uint64_t core_util_atomic_fetch_add_u64 (volatile uint64_t *valuePtr, uint64_t arg)
 Atomic add. More...
 
MBED_FORCEINLINE uint64_t core_util_atomic_fetch_add_explicit_u64 (volatile uint64_t *valuePtr, uint64_t arg, mbed_memory_order order)
 
MBED_FORCEINLINE int8_t core_util_atomic_fetch_add_s8 (volatile int8_t *valuePtr, int8_t arg)
 Atomic add. More...
 
MBED_FORCEINLINE int8_t core_util_atomic_fetch_add_explicit_s8 (volatile int8_t *valuePtr, int8_t arg, mbed_memory_order order)
 
MBED_FORCEINLINE int16_t core_util_atomic_fetch_add_s16 (volatile int16_t *valuePtr, int16_t arg)
 Atomic add. More...
 
MBED_FORCEINLINE int16_t core_util_atomic_fetch_add_explicit_s16 (volatile int16_t *valuePtr, int16_t arg, mbed_memory_order order)
 
MBED_FORCEINLINE int32_t core_util_atomic_fetch_add_s32 (volatile int32_t *valuePtr, int32_t arg)
 Atomic add. More...
 
MBED_FORCEINLINE int32_t core_util_atomic_fetch_add_explicit_s32 (volatile int32_t *valuePtr, int32_t arg, mbed_memory_order order)
 
MBED_FORCEINLINE int64_t core_util_atomic_fetch_add_s64 (volatile int64_t *valuePtr, int64_t arg)
 Atomic add. More...
 
MBED_FORCEINLINE int64_t core_util_atomic_fetch_add_explicit_s64 (volatile int64_t *valuePtr, int64_t arg, mbed_memory_order order)
 
MBED_FORCEINLINE void * core_util_atomic_fetch_add_ptr (void *volatile *valuePtr, ptrdiff_t arg)
 Atomic add. More...
 
MBED_FORCEINLINE void * core_util_atomic_fetch_add_explicit_ptr (void *volatile *valuePtr, ptrdiff_t arg, mbed_memory_order order)
 
uint8_t core_util_atomic_fetch_sub_u8 (volatile uint8_t *valuePtr, uint8_t arg)
 Atomic subtract. More...
 
MBED_FORCEINLINE uint8_t core_util_atomic_fetch_sub_explicit_u8 (volatile uint8_t *valuePtr, uint8_t arg, mbed_memory_order order)
 Atomic subtract. More...
 
uint16_t core_util_atomic_fetch_sub_u16 (volatile uint16_t *valuePtr, uint16_t arg)
 Atomic subtract. More...
 
MBED_FORCEINLINE uint16_t core_util_atomic_fetch_sub_explicit_u16 (volatile uint16_t *valuePtr, uint16_t arg, mbed_memory_order order)
 
uint32_t core_util_atomic_fetch_sub_u32 (volatile uint32_t *valuePtr, uint32_t arg)
 Atomic subtract. More...
 
MBED_FORCEINLINE uint32_t core_util_atomic_fetch_sub_explicit_u32 (volatile uint32_t *valuePtr, uint32_t arg, mbed_memory_order order)
 
uint64_t core_util_atomic_fetch_sub_u64 (volatile uint64_t *valuePtr, uint64_t arg)
 Atomic subtract. More...
 
MBED_FORCEINLINE uint64_t core_util_atomic_fetch_sub_explicit_u64 (volatile uint64_t *valuePtr, uint64_t arg, mbed_memory_order order)
 
MBED_FORCEINLINE int8_t core_util_atomic_fetch_sub_s8 (volatile int8_t *valuePtr, int8_t arg)
 Atomic subtract. More...
 
MBED_FORCEINLINE int8_t core_util_atomic_fetch_sub_explicit_s8 (volatile int8_t *valuePtr, int8_t arg, mbed_memory_order order)
 
MBED_FORCEINLINE int16_t core_util_atomic_fetch_sub_s16 (volatile int16_t *valuePtr, int16_t arg)
 Atomic subtract. More...
 
MBED_FORCEINLINE int16_t core_util_atomic_fetch_sub_explicit_s16 (volatile int16_t *valuePtr, int16_t arg, mbed_memory_order order)
 
MBED_FORCEINLINE int32_t core_util_atomic_fetch_sub_s32 (volatile int32_t *valuePtr, int32_t arg)
 Atomic subtract. More...
 
MBED_FORCEINLINE int32_t core_util_atomic_fetch_sub_explicit_s32 (volatile int32_t *valuePtr, int32_t arg, mbed_memory_order order)
 
MBED_FORCEINLINE int64_t core_util_atomic_fetch_sub_s64 (volatile int64_t *valuePtr, int64_t arg)
 Atomic subtract. More...
 
MBED_FORCEINLINE int64_t core_util_atomic_fetch_sub_explicit_s64 (volatile int64_t *valuePtr, int64_t arg, mbed_memory_order order)
 
MBED_FORCEINLINE void * core_util_atomic_fetch_sub_ptr (void *volatile *valuePtr, ptrdiff_t arg)
 Atomic subtract. More...
 
MBED_FORCEINLINE void * core_util_atomic_fetch_sub_explicit_ptr (void *volatile *valuePtr, ptrdiff_t arg, mbed_memory_order order)
 
uint8_t core_util_atomic_fetch_and_u8 (volatile uint8_t *valuePtr, uint8_t arg)
 Atomic bitwise and. More...
 
MBED_FORCEINLINE uint8_t core_util_atomic_fetch_and_explicit_u8 (volatile uint8_t *valuePtr, uint8_t arg, mbed_memory_order order)
 Atomic bitwise and. More...
 
uint16_t core_util_atomic_fetch_and_u16 (volatile uint16_t *valuePtr, uint16_t arg)
 Atomic bitwise and. More...
 
MBED_FORCEINLINE uint16_t core_util_atomic_fetch_and_explicit_u16 (volatile uint16_t *valuePtr, uint16_t arg, mbed_memory_order order)
 
uint32_t core_util_atomic_fetch_and_u32 (volatile uint32_t *valuePtr, uint32_t arg)
 Atomic bitwise and. More...
 
MBED_FORCEINLINE uint32_t core_util_atomic_fetch_and_explicit_u32 (volatile uint32_t *valuePtr, uint32_t arg, mbed_memory_order order)
 
uint64_t core_util_atomic_fetch_and_u64 (volatile uint64_t *valuePtr, uint64_t arg)
 Atomic bitwise and. More...
 
MBED_FORCEINLINE uint64_t core_util_atomic_fetch_and_explicit_u64 (volatile uint64_t *valuePtr, uint64_t arg, mbed_memory_order order)
 
uint8_t core_util_atomic_fetch_or_u8 (volatile uint8_t *valuePtr, uint8_t arg)
 Atomic bitwise inclusive or. More...
 
MBED_FORCEINLINE uint8_t core_util_atomic_fetch_or_explicit_u8 (volatile uint8_t *valuePtr, uint8_t arg, mbed_memory_order order)
 Atomic bitwise inclusive or. More...
 
uint16_t core_util_atomic_fetch_or_u16 (volatile uint16_t *valuePtr, uint16_t arg)
 Atomic bitwise inclusive or. More...
 
MBED_FORCEINLINE uint16_t core_util_atomic_fetch_or_explicit_u16 (volatile uint16_t *valuePtr, uint16_t arg, mbed_memory_order order)
 
uint32_t core_util_atomic_fetch_or_u32 (volatile uint32_t *valuePtr, uint32_t arg)
 Atomic bitwise inclusive or. More...
 
MBED_FORCEINLINE uint32_t core_util_atomic_fetch_or_explicit_u32 (volatile uint32_t *valuePtr, uint32_t arg, mbed_memory_order order)
 
uint64_t core_util_atomic_fetch_or_u64 (volatile uint64_t *valuePtr, uint64_t arg)
 Atomic bitwise inclusive or. More...
 
MBED_FORCEINLINE uint64_t core_util_atomic_fetch_or_explicit_u64 (volatile uint64_t *valuePtr, uint64_t arg, mbed_memory_order order)
 
uint8_t core_util_atomic_fetch_xor_u8 (volatile uint8_t *valuePtr, uint8_t arg)
 Atomic bitwise exclusive or. More...
 
MBED_FORCEINLINE uint8_t core_util_atomic_fetch_xor_explicit_u8 (volatile uint8_t *valuePtr, uint8_t arg, mbed_memory_order order)
 Atomic bitwise exclusive or. More...
 
uint16_t core_util_atomic_fetch_xor_u16 (volatile uint16_t *valuePtr, uint16_t arg)
 Atomic bitwise exclusive or. More...
 
MBED_FORCEINLINE uint16_t core_util_atomic_fetch_xor_explicit_u16 (volatile uint16_t *valuePtr, uint16_t arg, mbed_memory_order order)
 
uint32_t core_util_atomic_fetch_xor_u32 (volatile uint32_t *valuePtr, uint32_t arg)
 Atomic bitwise exclusive or. More...
 
MBED_FORCEINLINE uint32_t core_util_atomic_fetch_xor_explicit_u32 (volatile uint32_t *valuePtr, uint32_t arg, mbed_memory_order order)
 
uint64_t core_util_atomic_fetch_xor_u64 (volatile uint64_t *valuePtr, uint64_t arg)
 Atomic bitwise exclusive or. More...
 
MBED_FORCEINLINE uint64_t core_util_atomic_fetch_xor_explicit_u64 (volatile uint64_t *valuePtr, uint64_t arg, mbed_memory_order order)
 
template<typename T >
T core_util_atomic_load (const volatile T *valuePtr) noexcept
 
template<typename T >
T core_util_atomic_load (const T *valuePtr) noexcept
 
template<typename T >
void core_util_atomic_store (volatile T *valuePtr, mstd::type_identity_t< T > desiredValue) noexcept
 
template<typename T >
void core_util_atomic_store (T *valuePtr, mstd::type_identity_t< T > desiredValue) noexcept
 
template<typename T >
T core_util_atomic_exchange (volatile T *valuePtr, mstd::type_identity_t< T > desiredValue) noexcept
 Atomic exchange. More...
 
template<typename T >
bool core_util_atomic_compare_exchange_strong (volatile T *ptr, mstd::type_identity_t< T > *expectedCurrentValue, mstd::type_identity_t< T > desiredValue) noexcept
 Atomic compare and set. More...
 
template<typename T >
bool core_util_atomic_compare_exchange_weak (volatile T *ptr, mstd::type_identity_t< T > *expectedCurrentValue, mstd::type_identity_t< T > desiredValue) noexcept
 Atomic compare and set. More...
 
template<typename T >
T core_util_atomic_fetch_add (volatile T *valuePtr, mstd::type_identity_t< T > arg) noexcept
 Atomic add. More...
 
template<typename T >
T core_util_atomic_fetch_sub (volatile T *valuePtr, mstd::type_identity_t< T > arg) noexcept
 Atomic subtract. More...
 
template<typename T >
T core_util_atomic_fetch_and (volatile T *valuePtr, mstd::type_identity_t< T > arg) noexcept
 Atomic bitwise and. More...
 
template<typename T >
T core_util_atomic_fetch_or (volatile T *valuePtr, mstd::type_identity_t< T > arg) noexcept
 Atomic bitwise inclusive or. More...
 
template<typename T >
T core_util_atomic_fetch_xor (volatile T *valuePtr, mstd::type_identity_t< T > arg) noexcept
 Atomic bitwise exclusive or. More...
 
template<typename T >
T core_util_atomic_load_explicit (const volatile T *valuePtr, mbed_memory_order order) noexcept
 
template<typename T >
T core_util_atomic_load_explicit (const T *valuePtr, mbed_memory_order order) noexcept
 
template<typename T >
void core_util_atomic_store_explicit (volatile T *valuePtr, mstd::type_identity_t< T > desiredValue, mbed_memory_order order) noexcept
 
template<typename T >
void core_util_atomic_store_explicit (T *valuePtr, mstd::type_identity_t< T > desiredValue, mbed_memory_order order) noexcept
 
template<typename T >
T core_util_atomic_exchange_explicit (volatile T *valuePtr, mstd::type_identity_t< T > desiredValue, mbed_memory_order order) noexcept
 
template<typename T >
bool core_util_atomic_compare_exchange_strong_explicit (volatile T *ptr, mstd::type_identity_t< T > *expectedCurrentValue, mstd::type_identity_t< T > desiredValue, mbed_memory_order success, mbed_memory_order failure) noexcept
 
template<typename T >
bool core_util_atomic_compare_exchange_weak_explicit (volatile T *ptr, mstd::type_identity_t< T > *expectedCurrentValue, mstd::type_identity_t< T > desiredValue, mbed_memory_order success, mbed_memory_order failure) noexcept
 
template<typename T >
T core_util_atomic_fetch_add_explicit (volatile T *valuePtr, mstd::type_identity_t< T > arg, mbed_memory_order order) noexcept
 
template<typename T >
T core_util_atomic_fetch_sub_explicit (volatile T *valuePtr, mstd::type_identity_t< T > arg, mbed_memory_order order) noexcept
 
template<typename T >
T core_util_atomic_fetch_and_explicit (volatile T *valuePtr, mstd::type_identity_t< T > arg, mbed_memory_order order) noexcept
 
template<typename T >
T core_util_atomic_fetch_or_explicit (volatile T *valuePtr, mstd::type_identity_t< T > arg, mbed_memory_order order) noexcept
 
template<typename T >
T core_util_atomic_fetch_xor_explicit (volatile T *valuePtr, mstd::type_identity_t< T > arg, mbed_memory_order order) noexcept
 
template<typename T >
T * core_util_atomic_load (T *const volatile *valuePtr) noexcept
 
template<typename T >
T * core_util_atomic_load (T *const *valuePtr) noexcept
 
template<typename T >
void core_util_atomic_store (T *volatile *valuePtr, mstd::type_identity_t< T > *desiredValue) noexcept
 
template<typename T >
void core_util_atomic_store (T **valuePtr, mstd::type_identity_t< T > *desiredValue) noexcept
 
template<typename T >
T * core_util_atomic_exchange (T *volatile *valuePtr, mstd::type_identity_t< T > *desiredValue) noexcept
 Atomic exchange. More...
 
template<typename T >
bool core_util_atomic_compare_exchange_strong (T *volatile *ptr, mstd::type_identity_t< T > **expectedCurrentValue, mstd::type_identity_t< T > *desiredValue) noexcept
 Atomic compare and set. More...
 
template<typename T >
bool core_util_atomic_compare_exchange_weak (T *volatile *ptr, mstd::type_identity_t< T > **expectedCurrentValue, mstd::type_identity_t< T > *desiredValue) noexcept
 Atomic compare and set. More...
 
template<typename T >
T * core_util_atomic_fetch_add (T *volatile *valuePtr, ptrdiff_t arg) noexcept
 Atomic add. More...
 
template<typename T >
T * core_util_atomic_fetch_sub (T *volatile *valuePtr, ptrdiff_t arg) noexcept
 Atomic subtract. More...
 
template<typename T >
T * core_util_atomic_load_explicit (T *const volatile *valuePtr, mbed_memory_order order) noexcept
 
template<typename T >
T * core_util_atomic_load_explicit (T *const *valuePtr, mbed_memory_order order) noexcept
 
template<typename T >
void core_util_atomic_store_explicit (T *volatile *valuePtr, mstd::type_identity_t< T > *desiredValue, mbed_memory_order order) noexcept
 
template<typename T >
void core_util_atomic_store_explicit (T **valuePtr, mstd::type_identity_t< T > *desiredValue, mbed_memory_order order) noexcept
 
template<typename T >
T * core_util_atomic_exchange_explicit (T *volatile *valuePtr, mstd::type_identity_t< T > *desiredValue, mbed_memory_order order) noexcept
 
template<typename T >
bool core_util_atomic_compare_exchange_strong_explicit (T *volatile *ptr, mstd::type_identity_t< T > **expectedCurrentValue, mstd::type_identity_t< T > *desiredValue, mbed_memory_order success, mbed_memory_order failure) noexcept
 
template<typename T >
bool core_util_atomic_compare_exchange_weak_explicit (T *volatile *ptr, mstd::type_identity_t< T > **expectedCurrentValue, mstd::type_identity_t< T > *desiredValue, mbed_memory_order success, mbed_memory_order failure) noexcept
 
template<typename T >
T * core_util_atomic_fetch_add_explicit (T *volatile *valuePtr, ptrdiff_t arg, mbed_memory_order order) noexcept
 
template<typename T >
T * core_util_atomic_fetch_sub_explicit (T *volatile *valuePtr, ptrdiff_t arg, mbed_memory_order order) noexcept
 

Detailed Description

Atomic functions function analogously to C11 and C++11 - loads have acquire semantics, stores have release semantics, and atomic operations are sequentially consistent.

Atomicity is enforced both between threads and interrupt handlers.

Macro Definition Documentation

◆ CORE_UTIL_ATOMIC_FLAG_INIT

#define CORE_UTIL_ATOMIC_FLAG_INIT   { 0 }

Initializer for a core_util_atomic_flag.

Example:

#define CORE_UTIL_ATOMIC_FLAG_INIT
Initializer for a core_util_atomic_flag.
Definition: mbed_atomic.h:130
A lock-free, primitive atomic flag.
Definition: mbed_atomic.h:118

Definition at line 130 of file mbed_atomic.h.

Typedef Documentation

◆ mbed_memory_order

Memory order constraints for atomic operations.

Intended semantics are as per C++11.

◆ core_util_atomic_flag

A lock-free, primitive atomic flag.

Emulate C11's atomic_flag. The flag is initially in an indeterminate state unless explicitly initialized with CORE_UTIL_ATOMIC_FLAG_INIT.

Enumeration Type Documentation

◆ mbed_memory_order

Memory order constraints for atomic operations.

Intended semantics are as per C++11.

Definition at line 51 of file mbed_atomic.h.

Function Documentation

◆ core_util_atomic_flag_test_and_set()

bool core_util_atomic_flag_test_and_set ( volatile core_util_atomic_flag flagPtr)

Atomic test and set.

Atomically tests then sets the flag to true, returning the previous value.

Parameters
flagPtr  Target flag being tested and set.
Returns
The previous value.

◆ core_util_atomic_flag_test_and_set_explicit()

MBED_FORCEINLINE bool core_util_atomic_flag_test_and_set_explicit ( volatile core_util_atomic_flag flagPtr,
mbed_memory_order  order 
)

Atomic test and set.

Atomically tests then sets the flag to true, returning the previous value.

Parameters
flagPtr  Target flag being tested and set.
Returns
The previous value.
Parameters
order  memory ordering constraint

Definition at line 634 of file mbed_atomic_impl.h.

◆ core_util_atomic_flag_clear()

MBED_FORCEINLINE void core_util_atomic_flag_clear ( volatile core_util_atomic_flag flagPtr)

Atomic clear.

Parameters
flagPtr  Target flag being cleared.

Definition at line 675 of file mbed_atomic_impl.h.

◆ core_util_atomic_flag_clear_explicit()

MBED_FORCEINLINE void core_util_atomic_flag_clear_explicit ( volatile core_util_atomic_flag flagPtr,
mbed_memory_order  order 
)

Atomic clear.

Parameters
flagPtr  Target flag being cleared.
order  memory ordering constraint

Definition at line 682 of file mbed_atomic_impl.h.

◆ core_util_atomic_cas_u8()

bool core_util_atomic_cas_u8 ( volatile uint8_t *  ptr,
uint8_t *  expectedCurrentValue,
uint8_t  desiredValue 
)

Atomic compare and set.

It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.

Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].

Parameters
ptr  The target memory location.
[in,out]  expectedCurrentValue  A pointer to some location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value.
Note
: This is an in-out parameter. In the failure case of atomic_cas (where the destination isn't set), the pointee of expectedCurrentValue is updated with the current value.
Parameters
[in]  desiredValue  The new value computed based on '*expectedCurrentValue'.
Returns
true if the memory location was atomically updated with the desired value (after verifying that it contained the expectedCurrentValue), false otherwise. In the failure case, expectedCurrentValue is updated with the new value of the target memory location.

pseudocode: function cas(p : pointer to int, old : pointer to int, new : int) returns bool { if *p != *old { *old = *p return false } *p = new return true }

Note
: In the failure case (where the destination isn't set), the value pointed to by expectedCurrentValue is instead updated with the current value. This property helps writing concise code for the following incr:

function incr(p : pointer to int, a : int) returns int { done = false value = atomic_load(p) while not done { done = atomic_cas(p, &value, value + a) // *value gets updated automatically until success } return value + a }

However, if the call is made in a loop like this, the atomic_compare_exchange_weak functions are to be preferred.

Note
: This corresponds to the C11 "atomic_compare_exchange_strong" - it always succeeds if the current value is expected, as per the pseudocode above; it will not spuriously fail as "atomic_compare_exchange_weak" may. This call would normally be used when a fail return does not retry.

◆ core_util_atomic_cas_explicit_u8()

MBED_FORCEINLINE bool core_util_atomic_cas_explicit_u8 ( volatile uint8_t *  ptr,
uint8_t *  expectedCurrentValue,
uint8_t  desiredValue,
mbed_memory_order  success,
mbed_memory_order  failure 
)

Atomic compare and set.

It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.

Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].

Parameters
ptrThe target memory location.
[in,out]expectedCurrentValueA pointer to some location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value.
Note
: This is an in-out parameter. In the failure case of atomic_cas (where the destination isn't set), the pointee of expectedCurrentValue is updated with the current value.
Parameters
[in]desiredValueThe new value computed based on '*expectedCurrentValue'.
Returns
true if the memory location was atomically updated with the desired value (after verifying that it contained the expectedCurrentValue), false otherwise. In the failure case, expectedCurrentValue is updated with the new value of the target memory location.

pseudocode: function cas(p : pointer to int, old : pointer to int, new : int) returns bool { if *p != *old { *old = *p return false } *p = new return true }

Note
: In the failure case (where the destination isn't set), the value pointed to by expectedCurrentValue is instead updated with the current value. This property helps writing concise code for the following incr:

function incr(p : pointer to int, a : int) returns int { done = false value = atomic_load(p) while not done { done = atomic_cas(p, &value, value + a) // *value gets updated automatically until success } return value + a }

However, if the call is made in a loop like this, the atomic_compare_exchange_weak functions are to be preferred.

Note
: This corresponds to the C11 "atomic_compare_exchange_strong" - it always succeeds if the current value is expected, as per the pseudocode above; it will not spuriously fail as "atomic_compare_exchange_weak" may. This call would normally be used when a fail return does not retry.
Parameters
success  memory ordering constraint for successful exchange
failure  memory ordering constraint for failure

◆ core_util_atomic_cas_u16()

bool core_util_atomic_cas_u16 ( volatile uint16_t *  ptr,
uint16_t *  expectedCurrentValue,
uint16_t  desiredValue 
)

Atomic compare and set.

It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.

Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].

Parameters
ptrThe target memory location.
[in,out]expectedCurrentValueA pointer to some location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value.
Note
: This is an in-out parameter. In the failure case of atomic_cas (where the destination isn't set), the pointee of expectedCurrentValue is updated with the current value.
Parameters
[in]desiredValueThe new value computed based on '*expectedCurrentValue'.
Returns
true if the memory location was atomically updated with the desired value (after verifying that it contained the expectedCurrentValue), false otherwise. In the failure case, expectedCurrentValue is updated with the new value of the target memory location.

pseudocode: function cas(p : pointer to int, old : pointer to int, new : int) returns bool { if *p != *old { *old = *p return false } *p = new return true }

Note
: In the failure case (where the destination isn't set), the value pointed to by expectedCurrentValue is instead updated with the current value. This property helps writing concise code for the following incr:

function incr(p : pointer to int, a : int) returns int { done = false value = atomic_load(p) while not done { done = atomic_cas(p, &value, value + a) // *value gets updated automatically until success } return value + a }

However, if the call is made in a loop like this, the atomic_compare_exchange_weak functions are to be preferred.

Note
: This corresponds to the C11 "atomic_compare_exchange_strong" - it always succeeds if the current value is expected, as per the pseudocode above; it will not spuriously fail as "atomic_compare_exchange_weak" may. This call would normally be used when a fail return does not retry.

◆ core_util_atomic_cas_explicit_u16()

MBED_FORCEINLINE bool core_util_atomic_cas_explicit_u16 ( volatile uint16_t *  ptr,
uint16_t *  expectedCurrentValue,
uint16_t  desiredValue,
mbed_memory_order  success,
mbed_memory_order  failure 
)

◆ core_util_atomic_cas_u32()

bool core_util_atomic_cas_u32 ( volatile uint32_t *  ptr,
uint32_t *  expectedCurrentValue,
uint32_t  desiredValue 
)

Atomic compare and set.

It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.

Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].

Parameters
ptrThe target memory location.
[in,out]expectedCurrentValueA pointer to some location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value.
Note
: This is an in-out parameter. In the failure case of atomic_cas (where the destination isn't set), the pointee of expectedCurrentValue is updated with the current value.
Parameters
[in]desiredValueThe new value computed based on '*expectedCurrentValue'.
Returns
true if the memory location was atomically updated with the desired value (after verifying that it contained the expectedCurrentValue), false otherwise. In the failure case, expectedCurrentValue is updated with the new value of the target memory location.

pseudocode: function cas(p : pointer to int, old : pointer to int, new : int) returns bool { if *p != *old { *old = *p return false } *p = new return true }

Note
: In the failure case (where the destination isn't set), the value pointed to by expectedCurrentValue is instead updated with the current value. This property helps writing concise code for the following incr:

function incr(p : pointer to int, a : int) returns int { done = false value = atomic_load(p) while not done { done = atomic_cas(p, &value, value + a) // *value gets updated automatically until success } return value + a }

However, if the call is made in a loop like this, the atomic_compare_exchange_weak functions are to be preferred.

Note
: This corresponds to the C11 "atomic_compare_exchange_strong" - it always succeeds if the current value is expected, as per the pseudocode above; it will not spuriously fail as "atomic_compare_exchange_weak" may. This call would normally be used when a fail return does not retry.

◆ core_util_atomic_cas_explicit_u32()

MBED_FORCEINLINE bool core_util_atomic_cas_explicit_u32 ( volatile uint32_t *  ptr,
uint32_t *  expectedCurrentValue,
uint32_t  desiredValue,
mbed_memory_order  success,
mbed_memory_order  failure 
)

◆ core_util_atomic_cas_u64()

bool core_util_atomic_cas_u64 ( volatile uint64_t *  ptr,
uint64_t *  expectedCurrentValue,
uint64_t  desiredValue 
)

Atomic compare and set.

It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.

Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].

Parameters
ptrThe target memory location.
[in,out]expectedCurrentValueA pointer to some location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value.
Note
: This is an in-out parameter. In the failure case of atomic_cas (where the destination isn't set), the pointee of expectedCurrentValue is updated with the current value.
Parameters
[in]desiredValueThe new value computed based on '*expectedCurrentValue'.
Returns
true if the memory location was atomically updated with the desired value (after verifying that it contained the expectedCurrentValue), false otherwise. In the failure case, expectedCurrentValue is updated with the new value of the target memory location.

pseudocode: function cas(p : pointer to int, old : pointer to int, new : int) returns bool { if *p != *old { *old = *p return false } *p = new return true }

Note
: In the failure case (where the destination isn't set), the value pointed to by expectedCurrentValue is instead updated with the current value. This property helps writing concise code for the following incr:

function incr(p : pointer to int, a : int) returns int { done = false value = atomic_load(p) while not done { done = atomic_cas(p, &value, value + a) // *value gets updated automatically until success } return value + a }

However, if the call is made in a loop like this, the atomic_compare_exchange_weak functions are to be preferred.

Note
: This corresponds to the C11 "atomic_compare_exchange_strong" - it always succeeds if the current value is expected, as per the pseudocode above; it will not spuriously fail as "atomic_compare_exchange_weak" may. This call would normally be used when a fail return does not retry.

◆ core_util_atomic_cas_explicit_u64()

MBED_FORCEINLINE bool core_util_atomic_cas_explicit_u64 ( volatile uint64_t *  ptr,
uint64_t *  expectedCurrentValue,
uint64_t  desiredValue,
mbed_memory_order  success,
mbed_memory_order  failure 
)

◆ core_util_atomic_cas_s8()

MBED_FORCEINLINE bool core_util_atomic_cas_s8 ( volatile int8_t *  ptr,
int8_t *  expectedCurrentValue,
int8_t  desiredValue 
)

Atomic compare and set.

It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.

Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].

Parameters
ptrThe target memory location.
[in,out]expectedCurrentValueA pointer to some location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value.
Note
: This is an in-out parameter. In the failure case of atomic_cas (where the destination isn't set), the pointee of expectedCurrentValue is updated with the current value.
Parameters
[in]desiredValueThe new value computed based on '*expectedCurrentValue'.
Returns
true if the memory location was atomically updated with the desired value (after verifying that it contained the expectedCurrentValue), false otherwise. In the failure case, expectedCurrentValue is updated with the new value of the target memory location.

pseudocode: function cas(p : pointer to int, old : pointer to int, new : int) returns bool { if *p != *old { *old = *p return false } *p = new return true }

Note
: In the failure case (where the destination isn't set), the value pointed to by expectedCurrentValue is instead updated with the current value. This property helps writing concise code for the following incr:

function incr(p : pointer to int, a : int) returns int { done = false value = atomic_load(p) while not done { done = atomic_cas(p, &value, value + a) // *value gets updated automatically until success } return value + a }

However, if the call is made in a loop like this, the atomic_compare_exchange_weak functions are to be preferred.

Note
: This corresponds to the C11 "atomic_compare_exchange_strong" - it always succeeds if the current value is expected, as per the pseudocode above; it will not spuriously fail as "atomic_compare_exchange_weak" may. This call would normally be used when a fail return does not retry.

Definition at line 769 of file mbed_atomic_impl.h.

◆ core_util_atomic_cas_explicit_s8()

MBED_FORCEINLINE bool core_util_atomic_cas_explicit_s8 ( volatile int8_t *  ptr,
int8_t *  expectedCurrentValue,
int8_t  desiredValue,
mbed_memory_order  success,
mbed_memory_order  failure 
)

Definition at line 769 of file mbed_atomic_impl.h.

◆ core_util_atomic_cas_s16()

MBED_FORCEINLINE bool core_util_atomic_cas_s16 ( volatile int16_t *  ptr,
int16_t *  expectedCurrentValue,
int16_t  desiredValue 
)

Atomic compare and set.

It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.

Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].

Parameters
ptrThe target memory location.
[in,out]expectedCurrentValueA pointer to some location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value.
Note
: This is an in-out parameter. In the failure case of atomic_cas (where the destination isn't set), the pointee of expectedCurrentValue is updated with the current value.
Parameters
[in]desiredValueThe new value computed based on '*expectedCurrentValue'.
Returns
true if the memory location was atomically updated with the desired value (after verifying that it contained the expectedCurrentValue), false otherwise. In the failure case, expectedCurrentValue is updated with the new value of the target memory location.

pseudocode: function cas(p : pointer to int, old : pointer to int, new : int) returns bool { if *p != *old { *old = *p return false } *p = new return true }

Note
: In the failure case (where the destination isn't set), the value pointed to by expectedCurrentValue is instead updated with the current value. This property helps writing concise code for the following incr:

function incr(p : pointer to int, a : int) returns int { done = false value = atomic_load(p) while not done { done = atomic_cas(p, &value, value + a) // *value gets updated automatically until success } return value + a }

However, if the call is made in a loop like this, the atomic_compare_exchange_weak functions are to be preferred.

Note
: This corresponds to the C11 "atomic_compare_exchange_strong" - it always succeeds if the current value is expected, as per the pseudocode above; it will not spuriously fail as "atomic_compare_exchange_weak" may. This call would normally be used when a fail return does not retry.

Definition at line 769 of file mbed_atomic_impl.h.

◆ core_util_atomic_cas_explicit_s16()

MBED_FORCEINLINE bool core_util_atomic_cas_explicit_s16 ( volatile int16_t *  ptr,
int16_t *  expectedCurrentValue,
int16_t  desiredValue,
mbed_memory_order  success,
mbed_memory_order  failure 
)

Definition at line 769 of file mbed_atomic_impl.h.

◆ core_util_atomic_cas_s32()

MBED_FORCEINLINE bool core_util_atomic_cas_s32 ( volatile int32_t *  ptr,
int32_t *  expectedCurrentValue,
int32_t  desiredValue 
)

Atomic compare and set.

It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.

Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].

Parameters
ptrThe target memory location.
[in,out]expectedCurrentValueA pointer to some location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value.
Note
: This is an in-out parameter. In the failure case of atomic_cas (where the destination isn't set), the pointee of expectedCurrentValue is updated with the current value.
Parameters
[in]desiredValueThe new value computed based on '*expectedCurrentValue'.
Returns
true if the memory location was atomically updated with the desired value (after verifying that it contained the expectedCurrentValue), false otherwise. In the failure case, expectedCurrentValue is updated with the new value of the target memory location.

pseudocode: function cas(p : pointer to int, old : pointer to int, new : int) returns bool { if *p != *old { *old = *p return false } *p = new return true }

Note
: In the failure case (where the destination isn't set), the value pointed to by expectedCurrentValue is instead updated with the current value. This property helps writing concise code for the following incr:

function incr(p : pointer to int, a : int) returns int { done = false value = atomic_load(p) while not done { done = atomic_cas(p, &value, value + a) // *value gets updated automatically until success } return value + a }

However, if the call is made in a loop like this, the atomic_compare_exchange_weak functions are to be preferred.

Note
: This corresponds to the C11 "atomic_compare_exchange_strong" - it always succeeds if the current value is expected, as per the pseudocode above; it will not spuriously fail as "atomic_compare_exchange_weak" may. This call would normally be used when a fail return does not retry.

Definition at line 769 of file mbed_atomic_impl.h.

◆ core_util_atomic_cas_explicit_s32()

MBED_FORCEINLINE bool core_util_atomic_cas_explicit_s32 ( volatile int32_t *  ptr,
int32_t *  expectedCurrentValue,
int32_t  desiredValue,
mbed_memory_order  success,
mbed_memory_order  failure 
)

Definition at line 769 of file mbed_atomic_impl.h.

◆ core_util_atomic_cas_s64()

MBED_FORCEINLINE bool core_util_atomic_cas_s64 ( volatile int64_t *  ptr,
int64_t *  expectedCurrentValue,
int64_t  desiredValue 
)

Atomic compare and set.

It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.

Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].

Parameters
ptrThe target memory location.
[in,out]expectedCurrentValueA pointer to some location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value.
Note
: This is an in-out parameter. In the failure case of atomic_cas (where the destination isn't set), the pointee of expectedCurrentValue is updated with the current value.
Parameters
[in]desiredValueThe new value computed based on '*expectedCurrentValue'.
Returns
true if the memory location was atomically updated with the desired value (after verifying that it contained the expectedCurrentValue), false otherwise. In the failure case, expectedCurrentValue is updated with the new value of the target memory location.

pseudocode: function cas(p : pointer to int, old : pointer to int, new : int) returns bool { if *p != *old { *old = *p return false } *p = new return true }

Note
: In the failure case (where the destination isn't set), the value pointed to by expectedCurrentValue is instead updated with the current value. This property helps writing concise code for the following incr:

function incr(p : pointer to int, a : int) returns int { done = false value = atomic_load(p) while not done { done = atomic_cas(p, &value, value + a) // *value gets updated automatically until success } return value + a }

However, if the call is made in a loop like this, the atomic_compare_exchange_weak functions are to be preferred.

Note
: This corresponds to the C11 "atomic_compare_exchange_strong" - it always succeeds if the current value is expected, as per the pseudocode above; it will not spuriously fail as "atomic_compare_exchange_weak" may. This call would normally be used when a fail return does not retry.

Definition at line 769 of file mbed_atomic_impl.h.

◆ core_util_atomic_cas_explicit_s64()

MBED_FORCEINLINE bool core_util_atomic_cas_explicit_s64 ( volatile int64_t *  ptr,
int64_t *  expectedCurrentValue,
int64_t  desiredValue,
mbed_memory_order  success,
mbed_memory_order  failure 
)

Definition at line 769 of file mbed_atomic_impl.h.

◆ core_util_atomic_cas_bool()

MBED_FORCEINLINE bool core_util_atomic_cas_bool ( volatile bool *  ptr,
bool *  expectedCurrentValue,
bool  desiredValue 
)

Atomic compare and set.

It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.

Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].

Parameters
ptrThe target memory location.
[in,out]expectedCurrentValueA pointer to some location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value.
Note
: This is an in-out parameter. In the failure case of atomic_cas (where the destination isn't set), the pointee of expectedCurrentValue is updated with the current value.
Parameters
[in]desiredValueThe new value computed based on '*expectedCurrentValue'.
Returns
true if the memory location was atomically updated with the desired value (after verifying that it contained the expectedCurrentValue), false otherwise. In the failure case, expectedCurrentValue is updated with the new value of the target memory location.

pseudocode: function cas(p : pointer to int, old : pointer to int, new : int) returns bool { if *p != *old { *old = *p return false } *p = new return true }

Note
: In the failure case (where the destination isn't set), the value pointed to by expectedCurrentValue is instead updated with the current value. This property helps writing concise code for the following incr:

function incr(p : pointer to int, a : int) returns int { done = false value = atomic_load(p) while not done { done = atomic_cas(p, &value, value + a) // *value gets updated automatically until success } return value + a }

However, if the call is made in a loop like this, the atomic_compare_exchange_weak functions are to be preferred.

Note
: This corresponds to the C11 "atomic_compare_exchange_strong" - it always succeeds if the current value is expected, as per the pseudocode above; it will not spuriously fail as "atomic_compare_exchange_weak" may. This call would normally be used when a fail return does not retry.

Definition at line 772 of file mbed_atomic_impl.h.

◆ core_util_atomic_cas_explicit_bool()

MBED_FORCEINLINE bool core_util_atomic_cas_explicit_bool ( volatile bool *  ptr,
bool *  expectedCurrentValue,
bool  desiredValue,
mbed_memory_order  success,
mbed_memory_order  failure 
)

Definition at line 777 of file mbed_atomic_impl.h.

◆ core_util_atomic_cas_ptr()

bool core_util_atomic_cas_ptr ( void *volatile *  ptr,
void **  expectedCurrentValue,
void *  desiredValue 
)

Atomic compare and set.

It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.

Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].

Parameters
ptrThe target memory location.
[in,out]expectedCurrentValueA pointer to some location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value.
Note
: This is an in-out parameter. In the failure case of atomic_cas (where the destination isn't set), the pointee of expectedCurrentValue is updated with the current value.
Parameters
[in]desiredValueThe new value computed based on '*expectedCurrentValue'.
Returns
true if the memory location was atomically updated with the desired value (after verifying that it contained the expectedCurrentValue), false otherwise. In the failure case, expectedCurrentValue is updated with the new value of the target memory location.

pseudocode: function cas(p : pointer to int, old : pointer to int, new : int) returns bool { if *p != *old { *old = *p return false } *p = new return true }

Note
: In the failure case (where the destination isn't set), the value pointed to by expectedCurrentValue is instead updated with the current value. This property helps writing concise code for the following incr:

function incr(p : pointer to int, a : int) returns int { done = false value = atomic_load(p) while not done { done = atomic_cas(p, &value, value + a) // *value gets updated automatically until success } return value + a }

However, if the call is made in a loop like this, the atomic_compare_exchange_weak functions are to be preferred.

Note
: This corresponds to the C11 "atomic_compare_exchange_strong" - it always succeeds if the current value is expected, as per the pseudocode above; it will not spuriously fail as "atomic_compare_exchange_weak" may. This call would normally be used when a fail return does not retry.

Definition at line 782 of file mbed_atomic_impl.h.

◆ core_util_atomic_cas_explicit_ptr()

MBED_FORCEINLINE bool core_util_atomic_cas_explicit_ptr ( void *volatile *  ptr,
void **  expectedCurrentValue,
void *  desiredValue,
mbed_memory_order  success,
mbed_memory_order  failure 
)

Definition at line 797 of file mbed_atomic_impl.h.

◆ core_util_atomic_compare_exchange_weak_u8()

bool core_util_atomic_compare_exchange_weak_u8 ( volatile uint8_t *  ptr,
uint8_t *  expectedCurrentValue,
uint8_t  desiredValue 
)

Atomic compare and set.

It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.

Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].

Parameters
ptrThe target memory location.
[in,out]expectedCurrentValueA pointer to some location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value.
Note
: This is an in-out parameter. In the failure case of atomic_cas (where the destination isn't set), the pointee of expectedCurrentValue is updated with the current value.
Parameters
[in]desiredValueThe new value computed based on '*expectedCurrentValue'.
Returns
true if the memory location was atomically updated with the desired value (after verifying that it contained the expectedCurrentValue), false otherwise. In the failure case, expectedCurrentValue is updated with the new value of the target memory location.

pseudocode: function cas(p : pointer to int, old : pointer to int, new : int) returns bool { if *p != *old or spurious failure { *old = *p return false } *p = new return true }

Note
: In the failure case (where the destination isn't set), the value pointed to by expectedCurrentValue is instead updated with the current value. This property helps writing concise code for the following incr:

function incr(p : pointer to int, a : int) returns int { done = false value = *p // This fetch operation need not be atomic. while not done { done = atomic_compare_exchange_weak(p, &value, value + a) // *value gets updated automatically until success } return value + a }

Note
: This corresponds to the C11 "atomic_compare_exchange_weak" - it may spuriously fail even if the current value is expected, as per the pseudocode above, unlike "atomic_compare_exchange_strong". This call would normally be used when a fail return will cause a retry anyway, saving the need for an extra loop inside the cas operation.

◆ core_util_atomic_compare_exchange_weak_explicit_u8()

MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_u8 ( volatile uint8_t *  ptr,
uint8_t *  expectedCurrentValue,
uint8_t  desiredValue,
mbed_memory_order  success,
mbed_memory_order  failure 
)

Atomic compare and set.

It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.

Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].

Parameters
ptrThe target memory location.
[in,out]expectedCurrentValueA pointer to some location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value.
Note
: This is an in-out parameter. In the failure case of atomic_cas (where the destination isn't set), the pointee of expectedCurrentValue is updated with the current value.
Parameters
[in]desiredValueThe new value computed based on '*expectedCurrentValue'.
Returns
true if the memory location was atomically updated with the desired value (after verifying that it contained the expectedCurrentValue), false otherwise. In the failure case, expectedCurrentValue is updated with the new value of the target memory location.

pseudocode: function cas(p : pointer to int, old : pointer to int, new : int) returns bool { if *p != *old or spurious failure { *old = *p return false } *p = new return true }

Note
: In the failure case (where the destination isn't set), the value pointed to by expectedCurrentValue is instead updated with the current value. This property helps writing concise code for the following incr:

function incr(p : pointer to int, a : int) returns int { done = false value = *p // This fetch operation need not be atomic. while not done { done = atomic_compare_exchange_weak(p, &value, value + a) // *value gets updated automatically until success } return value + a }

Note
: This corresponds to the C11 "atomic_compare_exchange_weak" - it may spuriously fail even if the current value is expected, as per the pseudocode above, unlike "atomic_compare_exchange_strong". This call would normally be used when a fail return will cause a retry anyway, saving the need for an extra loop inside the cas operation.
Parameters
successmemory ordering constraint for successful exchange
failurememory ordering constraint for failure

◆ core_util_atomic_compare_exchange_weak_u16()

bool core_util_atomic_compare_exchange_weak_u16 ( volatile uint16_t *  ptr,
uint16_t *  expectedCurrentValue,
uint16_t  desiredValue 
)

Atomic compare and set.

It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.

Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].

Parameters
ptrThe target memory location.
[in,out]expectedCurrentValueA pointer to some location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value.
Note
: This is an in-out parameter. In the failure case of atomic_cas (where the destination isn't set), the pointee of expectedCurrentValue is updated with the current value.
Parameters
[in]desiredValueThe new value computed based on '*expectedCurrentValue'.
Returns
true if the memory location was atomically updated with the desired value (after verifying that it contained the expectedCurrentValue), false otherwise. In the failure case, expectedCurrentValue is updated with the new value of the target memory location.

pseudocode: function cas(p : pointer to int, old : pointer to int, new : int) returns bool { if *p != *old or spurious failure { *old = *p return false } *p = new return true }

Note
: In the failure case (where the destination isn't set), the value pointed to by expectedCurrentValue is instead updated with the current value. This property helps writing concise code for the following incr:

function incr(p : pointer to int, a : int) returns int { done = false value = *p // This fetch operation need not be atomic. while not done { done = atomic_compare_exchange_weak(p, &value, value + a) // *value gets updated automatically until success } return value + a }

Note
: This corresponds to the C11 "atomic_compare_exchange_weak" - it may spuriously fail even if the current value is expected, as per the pseudocode above, unlike "atomic_compare_exchange_strong". This call would normally be used when a fail return will cause a retry anyway, saving the need for an extra loop inside the cas operation.

◆ core_util_atomic_compare_exchange_weak_explicit_u16()

MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_u16 ( volatile uint16_t *  ptr,
uint16_t *  expectedCurrentValue,
uint16_t  desiredValue,
mbed_memory_order  success,
mbed_memory_order  failure 
)

◆ core_util_atomic_compare_exchange_weak_u32()

bool core_util_atomic_compare_exchange_weak_u32 ( volatile uint32_t *  ptr,
uint32_t *  expectedCurrentValue,
uint32_t  desiredValue 
)

Atomic compare and set.

It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.

Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].

Parameters
ptrThe target memory location.
[in,out]expectedCurrentValueA pointer to some location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value.
Note
: This is an in-out parameter. In the failure case of atomic_cas (where the destination isn't set), the pointee of expectedCurrentValue is updated with the current value.
Parameters
[in]desiredValueThe new value computed based on '*expectedCurrentValue'.
Returns
true if the memory location was atomically updated with the desired value (after verifying that it contained the expectedCurrentValue), false otherwise. In the failure case, expectedCurrentValue is updated with the new value of the target memory location.

pseudocode: function cas(p : pointer to int, old : pointer to int, new : int) returns bool { if *p != *old or spurious failure { *old = *p return false } *p = new return true }

Note
: In the failure case (where the destination isn't set), the value pointed to by expectedCurrentValue is instead updated with the current value. This property helps writing concise code for the following incr:

function incr(p : pointer to int, a : int) returns int { done = false value = *p // This fetch operation need not be atomic. while not done { done = atomic_compare_exchange_weak(p, &value, value + a) // *value gets updated automatically until success } return value + a }

Note
: This corresponds to the C11 "atomic_compare_exchange_weak" - it may spuriously fail even if the current value is expected, as per the pseudocode above, unlike "atomic_compare_exchange_strong". This call would normally be used when a fail return will cause a retry anyway, saving the need for an extra loop inside the cas operation.

◆ core_util_atomic_compare_exchange_weak_explicit_u32()

MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_u32 ( volatile uint32_t *  ptr,
uint32_t *  expectedCurrentValue,
uint32_t  desiredValue,
mbed_memory_order  success,
mbed_memory_order  failure 
)

◆ core_util_atomic_compare_exchange_weak_u64()

bool core_util_atomic_compare_exchange_weak_u64 ( volatile uint64_t *  ptr,
uint64_t *  expectedCurrentValue,
uint64_t  desiredValue 
)

Atomic compare and set.

It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.

Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].

Parameters
ptrThe target memory location.
[in,out]expectedCurrentValueA pointer to some location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value.
Note
: This is an in-out parameter. In the failure case of atomic_cas (where the destination isn't set), the pointee of expectedCurrentValue is updated with the current value.
Parameters
[in]desiredValueThe new value computed based on '*expectedCurrentValue'.
Returns
true if the memory location was atomically updated with the desired value (after verifying that it contained the expectedCurrentValue), false otherwise. In the failure case, expectedCurrentValue is updated with the new value of the target memory location.

pseudocode: function cas(p : pointer to int, old : pointer to int, new : int) returns bool { if *p != *old or spurious failure { *old = *p return false } *p = new return true }

Note
: In the failure case (where the destination isn't set), the value pointed to by expectedCurrentValue is instead updated with the current value. This property helps writing concise code for the following incr:

function incr(p : pointer to int, a : int) returns int { done = false value = *p // This fetch operation need not be atomic. while not done { done = atomic_compare_exchange_weak(p, &value, value + a) // *value gets updated automatically until success } return value + a }

Note
: This corresponds to the C11 "atomic_compare_exchange_weak" - it may spuriously fail even if the current value is expected, as per the pseudocode above, unlike "atomic_compare_exchange_strong". This call would normally be used when a fail return will cause a retry anyway, saving the need for an extra loop inside the cas operation.

◆ core_util_atomic_compare_exchange_weak_explicit_u64()

MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_u64 ( volatile uint64_t *  ptr,
uint64_t *  expectedCurrentValue,
uint64_t  desiredValue,
mbed_memory_order  success,
mbed_memory_order  failure 
)

◆ core_util_atomic_compare_exchange_weak_s8()

MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_s8 ( volatile int8_t *  ptr,
int8_t *  expectedCurrentValue,
int8_t  desiredValue 
)

Atomic compare and set.

It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.

Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].

Parameters
ptrThe target memory location.
[in,out]expectedCurrentValueA pointer to some location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value.
Note
: This is an in-out parameter. In the failure case of atomic_cas (where the destination isn't set), the pointee of expectedCurrentValue is updated with the current value.
Parameters
[in]desiredValueThe new value computed based on '*expectedCurrentValue'.
Returns
true if the memory location was atomically updated with the desired value (after verifying that it contained the expectedCurrentValue), false otherwise. In the failure case, expectedCurrentValue is updated with the new value of the target memory location.

pseudocode: function cas(p : pointer to int, old : pointer to int, new : int) returns bool { if *p != *old or spurious failure { *old = *p return false } *p = new return true }

Note
: In the failure case (where the destination isn't set), the value pointed to by expectedCurrentValue is instead updated with the current value. This property helps writing concise code for the following incr:

function incr(p : pointer to int, a : int) returns int { done = false value = *p // This fetch operation need not be atomic. while not done { done = atomic_compare_exchange_weak(p, &value, value + a) // *value gets updated automatically until success } return value + a }

Note
: This corresponds to the C11 "atomic_compare_exchange_weak" - it may spuriously fail even if the current value is expected, as per the pseudocode above, unlike "atomic_compare_exchange_strong". This call would normally be used when a fail return will cause a retry anyway, saving the need for an extra loop inside the cas operation.

Definition at line 770 of file mbed_atomic_impl.h.

◆ core_util_atomic_compare_exchange_weak_explicit_s8()

MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_s8 ( volatile int8_t *  ptr,
int8_t *  expectedCurrentValue,
int8_t  desiredValue,
mbed_memory_order  success,
mbed_memory_order  failure 
)

Definition at line 770 of file mbed_atomic_impl.h.

◆ core_util_atomic_compare_exchange_weak_s16()

MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_s16 ( volatile int16_t *  ptr,
int16_t *  expectedCurrentValue,
int16_t  desiredValue 
)

Atomic compare and set.

It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.

Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].

Parameters
ptrThe target memory location.
[in,out]expectedCurrentValueA pointer to some location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value.
Note
: This is an in-out parameter. In the failure case of atomic_cas (where the destination isn't set), the pointee of expectedCurrentValue is updated with the current value.
Parameters
[in]desiredValueThe new value computed based on '*expectedCurrentValue'.
Returns
true if the memory location was atomically updated with the desired value (after verifying that it contained the expectedCurrentValue), false otherwise. In the failure case, expectedCurrentValue is updated with the new value of the target memory location.

pseudocode: function cas(p : pointer to int, old : pointer to int, new : int) returns bool { if *p != *old or spurious failure { *old = *p return false } *p = new return true }

Note
: In the failure case (where the destination isn't set), the value pointed to by expectedCurrentValue is instead updated with the current value. This property helps writing concise code for the following incr:

function incr(p : pointer to int, a : int) returns int { done = false value = *p // This fetch operation need not be atomic. while not done { done = atomic_compare_exchange_weak(p, &value, value + a) // *value gets updated automatically until success } return value + a }

Note
: This corresponds to the C11 "atomic_compare_exchange_weak" - it may spuriously fail even if the current value is expected, as per the pseudocode above, unlike "atomic_compare_exchange_strong". This call would normally be used when a fail return will cause a retry anyway, saving the need for an extra loop inside the cas operation.

Definition at line 770 of file mbed_atomic_impl.h.

◆ core_util_atomic_compare_exchange_weak_explicit_s16()

MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_s16 ( volatile int16_t *  ptr,
int16_t *  expectedCurrentValue,
int16_t  desiredValue,
mbed_memory_order  success,
mbed_memory_order  failure 
)

Definition at line 770 of file mbed_atomic_impl.h.

◆ core_util_atomic_compare_exchange_weak_s32()

MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_s32 ( volatile int32_t *  ptr,
int32_t *  expectedCurrentValue,
int32_t  desiredValue 
)

Atomic compare and set.

It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.

Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].

Parameters
ptrThe target memory location.
[in,out]expectedCurrentValueA pointer to some location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value.
Note
: This is an in-out parameter. In the failure case of atomic_cas (where the destination isn't set), the pointee of expectedCurrentValue is updated with the current value.
Parameters
[in]desiredValueThe new value computed based on '*expectedCurrentValue'.
Returns
true if the memory location was atomically updated with the desired value (after verifying that it contained the expectedCurrentValue), false otherwise. In the failure case, expectedCurrentValue is updated with the new value of the target memory location.

pseudocode: function cas(p : pointer to int, old : pointer to int, new : int) returns bool { if *p != *old or spurious failure { *old = *p return false } *p = new return true }

Note
: In the failure case (where the destination isn't set), the value pointed to by expectedCurrentValue is instead updated with the current value. This property helps writing concise code for the following incr:

function incr(p : pointer to int, a : int) returns int { done = false value = *p // This fetch operation need not be atomic. while not done { done = atomic_compare_exchange_weak(p, &value, value + a) // *value gets updated automatically until success } return value + a }

Note
: This corresponds to the C11 "atomic_compare_exchange_weak" - it may spuriously fail even if the current value is expected, as per the pseudocode above, unlike "atomic_compare_exchange_strong". This call would normally be used when a fail return will cause a retry anyway, saving the need for an extra loop inside the cas operation.

Definition at line 770 of file mbed_atomic_impl.h.

◆ core_util_atomic_compare_exchange_weak_explicit_s32()

MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_s32 ( volatile int32_t *  ptr,
int32_t *  expectedCurrentValue,
int32_t  desiredValue,
mbed_memory_order  success,
mbed_memory_order  failure 
)

Definition at line 770 of file mbed_atomic_impl.h.

◆ core_util_atomic_compare_exchange_weak_s64()

MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_s64 ( volatile int64_t *  ptr,
int64_t *  expectedCurrentValue,
int64_t  desiredValue 
)

Atomic compare and set.

It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.

Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].

Parameters
ptrThe target memory location.
[in,out]expectedCurrentValueA pointer to some location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value.
Note
: This is an in-out parameter. In the failure case of atomic_cas (where the destination isn't set), the pointee of expectedCurrentValue is updated with the current value.
Parameters
[in]desiredValueThe new value computed based on '*expectedCurrentValue'.
Returns
true if the memory location was atomically updated with the desired value (after verifying that it contained the expectedCurrentValue), false otherwise. In the failure case, expectedCurrentValue is updated with the new value of the target memory location.

pseudocode: function cas(p : pointer to int, old : pointer to int, new : int) returns bool { if *p != *old or spurious failure { *old = *p return false } *p = new return true }

Note
: In the failure case (where the destination isn't set), the value pointed to by expectedCurrentValue is instead updated with the current value. This property helps writing concise code for the following incr:

function incr(p : pointer to int, a : int) returns int { done = false value = *p // This fetch operation need not be atomic. while not done { done = atomic_compare_exchange_weak(p, &value, value + a) // *value gets updated automatically until success } return value + a }

Note
: This corresponds to the C11 "atomic_compare_exchange_weak" - it may spuriously fail even if the current value is expected, as per the pseudocode above, unlike "atomic_compare_exchange_strong". This call would normally be used when a fail return will cause a retry anyway, saving the need for an extra loop inside the cas operation.

Definition at line 770 of file mbed_atomic_impl.h.

◆ core_util_atomic_compare_exchange_weak_explicit_s64()

MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_s64 ( volatile int64_t *  ptr,
int64_t *  expectedCurrentValue,
int64_t  desiredValue,
mbed_memory_order  success,
mbed_memory_order  failure 
)

Definition at line 770 of file mbed_atomic_impl.h.

◆ core_util_atomic_compare_exchange_weak_bool()

MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_bool ( volatile bool *  ptr,
bool *  expectedCurrentValue,
bool  desiredValue 
)

Atomic compare and set.

It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.

Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].

Parameters
ptrThe target memory location.
[in,out]expectedCurrentValueA pointer to some location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value.
Note
: This is an in-out parameter. In the failure case of atomic_cas (where the destination isn't set), the pointee of expectedCurrentValue is updated with the current value.
Parameters
[in]desiredValueThe new value computed based on '*expectedCurrentValue'.
Returns
true if the memory location was atomically updated with the desired value (after verifying that it contained the expectedCurrentValue), false otherwise. In the failure case, expectedCurrentValue is updated with the new value of the target memory location.

pseudocode: function cas(p : pointer to int, old : pointer to int, new : int) returns bool { if *p != *old or spurious failure { *old = *p return false } *p = new return true }

Note
: In the failure case (where the destination isn't set), the value pointed to by expectedCurrentValue is instead updated with the current value. This property helps writing concise code for the following incr:

function incr(p : pointer to int, a : int) returns int { done = false value = *p // This fetch operation need not be atomic. while not done { done = atomic_compare_exchange_weak(p, &value, value + a) // *value gets updated automatically until success } return value + a }

Note
: This corresponds to the C11 "atomic_compare_exchange_weak" - it may spuriously fail even if the current value is expected, as per the pseudocode above, unlike "atomic_compare_exchange_strong". This call would normally be used when a fail return will cause a retry anyway, saving the need for an extra loop inside the cas operation.

Definition at line 814 of file mbed_atomic_impl.h.

◆ core_util_atomic_compare_exchange_weak_explicit_bool()

MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_bool ( volatile bool *  ptr,
bool *  expectedCurrentValue,
bool  desiredValue,
mbed_memory_order  success,
mbed_memory_order  failure 
)

Definition at line 819 of file mbed_atomic_impl.h.

◆ core_util_atomic_compare_exchange_weak_ptr()

MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_ptr ( void *volatile *  ptr,
void **  expectedCurrentValue,
void *  desiredValue 
)

Atomic compare and set.

It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.

Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].

Parameters
ptrThe target memory location.
[in,out]expectedCurrentValueA pointer to some location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value.
Note
: This is an in-out parameter. In the failure case of atomic_cas (where the destination isn't set), the pointee of expectedCurrentValue is updated with the current value.
Parameters
[in]desiredValueThe new value computed based on '*expectedCurrentValue'.
Returns
true if the memory location was atomically updated with the desired value (after verifying that it contained the expectedCurrentValue), false otherwise. In the failure case, expectedCurrentValue is updated with the new value of the target memory location.

pseudocode: function cas(p : pointer to int, old : pointer to int, new : int) returns bool { if *p != *old or spurious failure { *old = *p return false } *p = new return true }

Note
: In the failure case (where the destination isn't set), the value pointed to by expectedCurrentValue is instead updated with the current value. This property helps writing concise code for the following incr:

function incr(p : pointer to int, a : int) returns int { done = false value = *p // This fetch operation need not be atomic. while not done { done = atomic_compare_exchange_weak(p, &value, value + a) // *value gets updated automatically until success } return value + a }

Note
: This corresponds to the C11 "atomic_compare_exchange_weak" - it may spuriously fail even if the current value is expected, as per the pseudocode above, unlike "atomic_compare_exchange_strong". This call would normally be used when a fail return will cause a retry anyway, saving the need for an extra loop inside the cas operation.

Definition at line 824 of file mbed_atomic_impl.h.

◆ core_util_atomic_compare_exchange_weak_explicit_ptr()

MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_ptr ( void *volatile *  ptr,
void **  expectedCurrentValue,
void *  desiredValue,
mbed_memory_order  success,
mbed_memory_order  failure 
)

Definition at line 839 of file mbed_atomic_impl.h.

◆ core_util_atomic_load_u8()

MBED_FORCEINLINE uint8_t core_util_atomic_load_u8 ( const volatile uint8_t *  valuePtr)

Atomic load.

Parameters
valuePtrTarget memory location.
Returns
The loaded value.

Definition at line 722 of file mbed_atomic_impl.h.

◆ core_util_atomic_load_explicit_u8()

MBED_FORCEINLINE uint8_t core_util_atomic_load_explicit_u8 ( const volatile uint8_t *  valuePtr,
mbed_memory_order  order 
)

Atomic load with explicit ordering.

Parameters
valuePtrTarget memory location.
ordermemory ordering constraint
Returns
The loaded value.

Definition at line 722 of file mbed_atomic_impl.h.

◆ core_util_atomic_load_u16()

MBED_FORCEINLINE uint16_t core_util_atomic_load_u16 ( const volatile uint16_t *  valuePtr)

Definition at line 723 of file mbed_atomic_impl.h.

◆ core_util_atomic_load_explicit_u16()

MBED_FORCEINLINE uint16_t core_util_atomic_load_explicit_u16 ( const volatile uint16_t *  valuePtr,
mbed_memory_order  order 
)

Definition at line 723 of file mbed_atomic_impl.h.

◆ core_util_atomic_load_u32()

MBED_FORCEINLINE uint32_t core_util_atomic_load_u32 ( const volatile uint32_t *  valuePtr)

Definition at line 724 of file mbed_atomic_impl.h.

◆ core_util_atomic_load_explicit_u32()

MBED_FORCEINLINE uint32_t core_util_atomic_load_explicit_u32 ( const volatile uint32_t *  valuePtr,
mbed_memory_order  order 
)

Definition at line 724 of file mbed_atomic_impl.h.

◆ core_util_atomic_load_u64()

uint64_t core_util_atomic_load_u64 ( const volatile uint64_t *  valuePtr)

◆ core_util_atomic_load_explicit_u64()

MBED_FORCEINLINE uint64_t core_util_atomic_load_explicit_u64 ( const volatile uint64_t *  valuePtr,
mbed_memory_order  order 
)

◆ core_util_atomic_load_s8()

MBED_FORCEINLINE int8_t core_util_atomic_load_s8 ( const volatile int8_t *  valuePtr)

Definition at line 725 of file mbed_atomic_impl.h.

◆ core_util_atomic_load_explicit_s8()

MBED_FORCEINLINE int8_t core_util_atomic_load_explicit_s8 ( const volatile int8_t *  valuePtr,
mbed_memory_order  order 
)

Definition at line 725 of file mbed_atomic_impl.h.

◆ core_util_atomic_load_s16()

MBED_FORCEINLINE int16_t core_util_atomic_load_s16 ( const volatile int16_t *  valuePtr)

Definition at line 726 of file mbed_atomic_impl.h.

◆ core_util_atomic_load_explicit_s16()

MBED_FORCEINLINE int16_t core_util_atomic_load_explicit_s16 ( const volatile int16_t *  valuePtr,
mbed_memory_order  order 
)

Definition at line 726 of file mbed_atomic_impl.h.

◆ core_util_atomic_load_s32()

MBED_FORCEINLINE int32_t core_util_atomic_load_s32 ( const volatile int32_t *  valuePtr)

Definition at line 727 of file mbed_atomic_impl.h.

◆ core_util_atomic_load_explicit_s32()

MBED_FORCEINLINE int32_t core_util_atomic_load_explicit_s32 ( const volatile int32_t *  valuePtr,
mbed_memory_order  order 
)

Definition at line 727 of file mbed_atomic_impl.h.

◆ core_util_atomic_load_s64()

MBED_FORCEINLINE int64_t core_util_atomic_load_s64 ( const volatile int64_t *  valuePtr)

Definition at line 737 of file mbed_atomic_impl.h.

◆ core_util_atomic_load_explicit_s64()

MBED_FORCEINLINE int64_t core_util_atomic_load_explicit_s64 ( const volatile int64_t *  valuePtr,
MBED_UNUSED mbed_memory_order  order 
)

Atomic load with explicit ordering.

Parameters
valuePtrTarget memory location.
orderCurrently unused since 64-bit atomic ops must be emulated
Returns
The loaded value.

Definition at line 981 of file mbed_atomic_impl.h.

◆ core_util_atomic_load_bool()

MBED_FORCEINLINE bool core_util_atomic_load_bool ( const volatile bool *  valuePtr)

Definition at line 728 of file mbed_atomic_impl.h.

◆ core_util_atomic_load_explicit_bool()

MBED_FORCEINLINE bool core_util_atomic_load_explicit_bool ( const volatile bool *  valuePtr,
mbed_memory_order  order 
)

Definition at line 728 of file mbed_atomic_impl.h.

◆ core_util_atomic_load_ptr()

MBED_FORCEINLINE void * core_util_atomic_load_ptr ( void *const volatile *  valuePtr)

Definition at line 729 of file mbed_atomic_impl.h.

◆ core_util_atomic_load_explicit_ptr()

MBED_FORCEINLINE void * core_util_atomic_load_explicit_ptr ( void *const volatile *  valuePtr,
mbed_memory_order  order 
)

Definition at line 729 of file mbed_atomic_impl.h.

◆ core_util_atomic_store_u8()

MBED_FORCEINLINE void core_util_atomic_store_u8 ( volatile uint8_t *  valuePtr,
uint8_t  desiredValue 
)

Atomic store.

Parameters
valuePtrTarget memory location.
desiredValueThe value to store.

Definition at line 722 of file mbed_atomic_impl.h.

◆ core_util_atomic_store_explicit_u8()

MBED_FORCEINLINE void core_util_atomic_store_explicit_u8 ( volatile uint8_t *  valuePtr,
uint8_t  desiredValue,
mbed_memory_order  order 
)

Atomic store with explicit ordering.

Parameters
valuePtrTarget memory location.
desiredValueThe value to store.
ordermemory ordering constraint

Definition at line 722 of file mbed_atomic_impl.h.

◆ core_util_atomic_store_u16()

MBED_FORCEINLINE void core_util_atomic_store_u16 ( volatile uint16_t *  valuePtr,
uint16_t  desiredValue 
)

Definition at line 723 of file mbed_atomic_impl.h.

◆ core_util_atomic_store_explicit_u16()

MBED_FORCEINLINE void core_util_atomic_store_explicit_u16 ( volatile uint16_t *  valuePtr,
uint16_t  desiredValue,
mbed_memory_order  order 
)

Definition at line 723 of file mbed_atomic_impl.h.

◆ core_util_atomic_store_u32()

MBED_FORCEINLINE void core_util_atomic_store_u32 ( volatile uint32_t *  valuePtr,
uint32_t  desiredValue 
)

Definition at line 724 of file mbed_atomic_impl.h.

◆ core_util_atomic_store_explicit_u32()

MBED_FORCEINLINE void core_util_atomic_store_explicit_u32 ( volatile uint32_t *  valuePtr,
uint32_t  desiredValue,
mbed_memory_order  order 
)

Definition at line 724 of file mbed_atomic_impl.h.

◆ core_util_atomic_store_u64()

void core_util_atomic_store_u64 ( volatile uint64_t *  valuePtr,
uint64_t  desiredValue 
)

◆ core_util_atomic_store_explicit_u64()

MBED_FORCEINLINE void core_util_atomic_store_explicit_u64 ( volatile uint64_t *  valuePtr,
uint64_t  desiredValue,
mbed_memory_order  order 
)

◆ core_util_atomic_store_s8()

MBED_FORCEINLINE void core_util_atomic_store_s8 ( volatile int8_t *  valuePtr,
int8_t  desiredValue 
)

Definition at line 725 of file mbed_atomic_impl.h.

◆ core_util_atomic_store_explicit_s8()

MBED_FORCEINLINE void core_util_atomic_store_explicit_s8 ( volatile int8_t *  valuePtr,
int8_t  desiredValue,
mbed_memory_order  order 
)

Definition at line 725 of file mbed_atomic_impl.h.

◆ core_util_atomic_store_s16()

MBED_FORCEINLINE void core_util_atomic_store_s16 ( volatile int16_t *  valuePtr,
int16_t  desiredValue 
)

Definition at line 726 of file mbed_atomic_impl.h.

◆ core_util_atomic_store_explicit_s16()

MBED_FORCEINLINE void core_util_atomic_store_explicit_s16 ( volatile int16_t *  valuePtr,
int16_t  desiredValue,
mbed_memory_order  order 
)

Definition at line 726 of file mbed_atomic_impl.h.

◆ core_util_atomic_store_s32()

MBED_FORCEINLINE void core_util_atomic_store_s32 ( volatile int32_t *  valuePtr,
int32_t  desiredValue 
)

Definition at line 727 of file mbed_atomic_impl.h.

◆ core_util_atomic_store_explicit_s32()

MBED_FORCEINLINE void core_util_atomic_store_explicit_s32 ( volatile int32_t *  valuePtr,
int32_t  desiredValue,
mbed_memory_order  order 
)

Definition at line 727 of file mbed_atomic_impl.h.

◆ core_util_atomic_store_s64()

MBED_FORCEINLINE void core_util_atomic_store_s64 ( volatile int64_t *  valuePtr,
int64_t  desiredValue 
)

Definition at line 742 of file mbed_atomic_impl.h.

◆ core_util_atomic_store_explicit_s64()

MBED_FORCEINLINE void core_util_atomic_store_explicit_s64 ( volatile int64_t *  valuePtr,
int64_t  desiredValue,
mbed_memory_order  order 
)

◆ core_util_atomic_store_bool()

MBED_FORCEINLINE void core_util_atomic_store_bool ( volatile bool *  valuePtr,
bool  desiredValue 
)

Definition at line 728 of file mbed_atomic_impl.h.

◆ core_util_atomic_store_explicit_bool()

MBED_FORCEINLINE void core_util_atomic_store_explicit_bool ( volatile bool *  valuePtr,
bool  desiredValue,
mbed_memory_order  order 
)

Definition at line 728 of file mbed_atomic_impl.h.

◆ core_util_atomic_store_ptr()

MBED_FORCEINLINE void core_util_atomic_store_ptr ( void *volatile *  valuePtr,
void *  desiredValue 
)

Definition at line 729 of file mbed_atomic_impl.h.

◆ core_util_atomic_store_explicit_ptr()

MBED_FORCEINLINE void core_util_atomic_store_explicit_ptr ( void *volatile *  valuePtr,
void *  desiredValue,
mbed_memory_order  order 
)

Definition at line 729 of file mbed_atomic_impl.h.

◆ core_util_atomic_exchange_u8()

uint8_t core_util_atomic_exchange_u8 ( volatile uint8_t *  valuePtr,
uint8_t  desiredValue 
)

Atomic exchange.

Parameters
valuePtrTarget memory location.
desiredValueThe value to store.
Returns
The previous value.

◆ core_util_atomic_exchange_explicit_u8()

MBED_FORCEINLINE uint8_t core_util_atomic_exchange_explicit_u8 ( volatile uint8_t *  valuePtr,
uint8_t  desiredValue,
mbed_memory_order  order 
)

Atomic exchange.

Parameters
valuePtrTarget memory location.
desiredValueThe value to store.
Returns
The previous value.
Parameters
ordermemory ordering constraint

◆ core_util_atomic_exchange_u16()

uint16_t core_util_atomic_exchange_u16 ( volatile uint16_t *  valuePtr,
uint16_t  desiredValue 
)

Atomic exchange.

Parameters
valuePtrTarget memory location.
desiredValueThe value to store.
Returns
The previous value.

◆ core_util_atomic_exchange_explicit_u16()

MBED_FORCEINLINE uint16_t core_util_atomic_exchange_explicit_u16 ( volatile uint16_t *  valuePtr,
uint16_t  desiredValue,
mbed_memory_order  order 
)

◆ core_util_atomic_exchange_u32()

uint32_t core_util_atomic_exchange_u32 ( volatile uint32_t *  valuePtr,
uint32_t  desiredValue 
)

Atomic exchange.

Parameters
valuePtrTarget memory location.
desiredValueThe value to store.
Returns
The previous value.

◆ core_util_atomic_exchange_explicit_u32()

MBED_FORCEINLINE uint32_t core_util_atomic_exchange_explicit_u32 ( volatile uint32_t *  valuePtr,
uint32_t  desiredValue,
mbed_memory_order  order 
)

◆ core_util_atomic_exchange_u64()

uint64_t core_util_atomic_exchange_u64 ( volatile uint64_t *  valuePtr,
uint64_t  desiredValue 
)

Atomic exchange.

Parameters
valuePtrTarget memory location.
desiredValueThe value to store.
Returns
The previous value.

◆ core_util_atomic_exchange_explicit_u64()

MBED_FORCEINLINE uint64_t core_util_atomic_exchange_explicit_u64 ( volatile uint64_t *  valuePtr,
uint64_t  desiredValue,
mbed_memory_order  order 
)

◆ core_util_atomic_exchange_s8()

MBED_FORCEINLINE int8_t core_util_atomic_exchange_s8 ( volatile int8_t *  valuePtr,
int8_t  desiredValue 
)

Atomic exchange.

Parameters
valuePtrTarget memory location.
desiredValueThe value to store.
Returns
The previous value.

Definition at line 880 of file mbed_atomic_impl.h.

◆ core_util_atomic_exchange_explicit_s8()

MBED_FORCEINLINE int8_t core_util_atomic_exchange_explicit_s8 ( volatile int8_t *  valuePtr,
int8_t  desiredValue,
mbed_memory_order  order 
)

Definition at line 886 of file mbed_atomic_impl.h.

◆ core_util_atomic_exchange_s16()

MBED_FORCEINLINE int16_t core_util_atomic_exchange_s16 ( volatile int16_t *  valuePtr,
int16_t  desiredValue 
)

Atomic exchange.

Parameters
valuePtrTarget memory location.
desiredValueThe value to store.
Returns
The previous value.

Definition at line 880 of file mbed_atomic_impl.h.

◆ core_util_atomic_exchange_explicit_s16()

MBED_FORCEINLINE int16_t core_util_atomic_exchange_explicit_s16 ( volatile int16_t *  valuePtr,
int16_t  desiredValue,
mbed_memory_order  order 
)

Definition at line 886 of file mbed_atomic_impl.h.

◆ core_util_atomic_exchange_s32()

MBED_FORCEINLINE int32_t core_util_atomic_exchange_s32 ( volatile int32_t *  valuePtr,
int32_t  desiredValue 
)

Atomic exchange.

Parameters
valuePtrTarget memory location.
desiredValueThe value to store.
Returns
The previous value.

Definition at line 880 of file mbed_atomic_impl.h.

◆ core_util_atomic_exchange_explicit_s32()

MBED_FORCEINLINE int32_t core_util_atomic_exchange_explicit_s32 ( volatile int32_t *  valuePtr,
int32_t  desiredValue,
mbed_memory_order  order 
)

Definition at line 886 of file mbed_atomic_impl.h.

◆ core_util_atomic_exchange_s64()

MBED_FORCEINLINE int64_t core_util_atomic_exchange_s64 ( volatile int64_t *  valuePtr,
int64_t  desiredValue 
)

Atomic exchange.

Parameters
valuePtrTarget memory location.
desiredValueThe value to store.
Returns
The previous value.

Definition at line 880 of file mbed_atomic_impl.h.

◆ core_util_atomic_exchange_explicit_s64()

MBED_FORCEINLINE int64_t core_util_atomic_exchange_explicit_s64 ( volatile int64_t *  valuePtr,
int64_t  desiredValue,
mbed_memory_order  order 
)

Definition at line 886 of file mbed_atomic_impl.h.

◆ core_util_atomic_exchange_bool()

MBED_FORCEINLINE bool core_util_atomic_exchange_bool ( volatile bool *  valuePtr,
bool  desiredValue 
)

Atomic exchange.

Parameters
valuePtrTarget memory location.
desiredValueThe value to store.
Returns
The previous value.

Definition at line 890 of file mbed_atomic_impl.h.

◆ core_util_atomic_exchange_explicit_bool()

MBED_FORCEINLINE bool core_util_atomic_exchange_explicit_bool ( volatile bool *  valuePtr,
bool  desiredValue,
mbed_memory_order  order 
)

Definition at line 895 of file mbed_atomic_impl.h.

◆ core_util_atomic_exchange_ptr()

void * core_util_atomic_exchange_ptr ( void *volatile *  valuePtr,
void *  desiredValue 
)

Atomic exchange.

Parameters
valuePtrTarget memory location.
desiredValueThe value to store.
Returns
The previous value.

Definition at line 900 of file mbed_atomic_impl.h.

◆ core_util_atomic_exchange_explicit_ptr()

MBED_FORCEINLINE void * core_util_atomic_exchange_explicit_ptr ( void *volatile *  valuePtr,
void *  desiredValue,
mbed_memory_order  order 
)

Definition at line 909 of file mbed_atomic_impl.h.

◆ core_util_atomic_incr_u8()

uint8_t core_util_atomic_incr_u8 ( volatile uint8_t *  valuePtr,
uint8_t  delta 
)

Atomic increment.

Parameters
valuePtrTarget memory location being incremented.
deltaThe amount being incremented.
Returns
The new incremented value.

◆ core_util_atomic_incr_u16()

uint16_t core_util_atomic_incr_u16 ( volatile uint16_t *  valuePtr,
uint16_t  delta 
)

Atomic increment.

Parameters
valuePtrTarget memory location being incremented.
deltaThe amount being incremented.
Returns
The new incremented value.

◆ core_util_atomic_incr_u32()

uint32_t core_util_atomic_incr_u32 ( volatile uint32_t *  valuePtr,
uint32_t  delta 
)

Atomic increment.

Parameters
valuePtrTarget memory location being incremented.
deltaThe amount being incremented.
Returns
The new incremented value.

◆ core_util_atomic_incr_u64()

uint64_t core_util_atomic_incr_u64 ( volatile uint64_t *  valuePtr,
uint64_t  delta 
)

Atomic increment.

Parameters
valuePtrTarget memory location being incremented.
deltaThe amount being incremented.
Returns
The new incremented value.

◆ core_util_atomic_incr_s8()

MBED_FORCEINLINE int8_t core_util_atomic_incr_s8 ( volatile int8_t *  valuePtr,
int8_t  delta 
)

Atomic increment.

Parameters
valuePtrTarget memory location being incremented.
deltaThe amount being incremented.
Returns
The new incremented value.

Definition at line 881 of file mbed_atomic_impl.h.

◆ core_util_atomic_incr_s16()

MBED_FORCEINLINE int16_t core_util_atomic_incr_s16 ( volatile int16_t *  valuePtr,
int16_t  delta 
)

Atomic increment.

Parameters
valuePtrTarget memory location being incremented.
deltaThe amount being incremented.
Returns
The new incremented value.

Definition at line 881 of file mbed_atomic_impl.h.

◆ core_util_atomic_incr_s32()

MBED_FORCEINLINE int32_t core_util_atomic_incr_s32 ( volatile int32_t *  valuePtr,
int32_t  delta 
)

Atomic increment.

Parameters
valuePtrTarget memory location being incremented.
deltaThe amount being incremented.
Returns
The new incremented value.

Definition at line 881 of file mbed_atomic_impl.h.

◆ core_util_atomic_incr_s64()

MBED_FORCEINLINE int64_t core_util_atomic_incr_s64 ( volatile int64_t *  valuePtr,
int64_t  delta 
)

Atomic increment.

Parameters
valuePtrTarget memory location being incremented.
deltaThe amount being incremented.
Returns
The new incremented value.

Definition at line 881 of file mbed_atomic_impl.h.

◆ core_util_atomic_incr_ptr()

void * core_util_atomic_incr_ptr ( void *volatile *  valuePtr,
ptrdiff_t  delta 
)

Atomic increment.

Parameters
valuePtrTarget memory location being incremented.
deltaThe amount being incremented.
Returns
The new incremented value.

Definition at line 918 of file mbed_atomic_impl.h.

◆ core_util_atomic_decr_u8()

uint8_t core_util_atomic_decr_u8 ( volatile uint8_t *  valuePtr,
uint8_t  delta 
)

Atomic decrement.

Parameters
valuePtrTarget memory location being decremented.
deltaThe amount being decremented.
Returns
The new decremented value.

◆ core_util_atomic_decr_u16()

uint16_t core_util_atomic_decr_u16 ( volatile uint16_t *  valuePtr,
uint16_t  delta 
)

Atomic decrement.

Parameters
valuePtrTarget memory location being decremented.
deltaThe amount being decremented.
Returns
The new decremented value.

◆ core_util_atomic_decr_u32()

uint32_t core_util_atomic_decr_u32 ( volatile uint32_t *  valuePtr,
uint32_t  delta 
)

Atomic decrement.

Parameters
valuePtrTarget memory location being decremented.
deltaThe amount being decremented.
Returns
The new decremented value.

◆ core_util_atomic_decr_u64()

uint64_t core_util_atomic_decr_u64 ( volatile uint64_t *  valuePtr,
uint64_t  delta 
)

Atomic decrement.

Parameters
valuePtrTarget memory location being decremented.
deltaThe amount being decremented.
Returns
The new decremented value.

◆ core_util_atomic_decr_s8()

MBED_FORCEINLINE int8_t core_util_atomic_decr_s8 ( volatile int8_t *  valuePtr,
int8_t  delta 
)

Atomic decrement.

Parameters
valuePtrTarget memory location being decremented.
deltaThe amount being decremented.
Returns
The new decremented value.

Definition at line 882 of file mbed_atomic_impl.h.

◆ core_util_atomic_decr_s16()

MBED_FORCEINLINE int16_t core_util_atomic_decr_s16 ( volatile int16_t *  valuePtr,
int16_t  delta 
)

Atomic decrement.

Parameters
valuePtrTarget memory location being decremented.
deltaThe amount being decremented.
Returns
The new decremented value.

Definition at line 882 of file mbed_atomic_impl.h.

◆ core_util_atomic_decr_s32()

MBED_FORCEINLINE int32_t core_util_atomic_decr_s32 ( volatile int32_t *  valuePtr,
int32_t  delta 
)

Atomic decrement.

Parameters
valuePtrTarget memory location being decremented.
deltaThe amount being decremented.
Returns
The new decremented value.

Definition at line 882 of file mbed_atomic_impl.h.

◆ core_util_atomic_decr_s64()

MBED_FORCEINLINE int64_t core_util_atomic_decr_s64 ( volatile int64_t *  valuePtr,
int64_t  delta 
)

Atomic decrement.

Parameters
valuePtrTarget memory location being decremented.
deltaThe amount being decremented.
Returns
The new decremented value.

Definition at line 882 of file mbed_atomic_impl.h.

◆ core_util_atomic_decr_ptr()

void * core_util_atomic_decr_ptr ( void *volatile *  valuePtr,
ptrdiff_t  delta 
)

Atomic decrement.

Parameters
valuePtrTarget memory location being decremented.
deltaThe amount being decremented.
Returns
The new decremented value.

Definition at line 927 of file mbed_atomic_impl.h.

◆ core_util_atomic_fetch_add_u8()

uint8_t core_util_atomic_fetch_add_u8 ( volatile uint8_t *  valuePtr,
uint8_t  arg 
)

Atomic add.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the addition.
Returns
The original value.

◆ core_util_atomic_fetch_add_explicit_u8()

MBED_FORCEINLINE uint8_t core_util_atomic_fetch_add_explicit_u8 ( volatile uint8_t *  valuePtr,
uint8_t  arg,
mbed_memory_order  order 
)

Atomic add.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the addition.
Returns
The original value.
Parameters
ordermemory ordering constraint

◆ core_util_atomic_fetch_add_u16()

uint16_t core_util_atomic_fetch_add_u16 ( volatile uint16_t *  valuePtr,
uint16_t  arg 
)

Atomic add.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the addition.
Returns
The original value.

◆ core_util_atomic_fetch_add_explicit_u16()

MBED_FORCEINLINE uint16_t core_util_atomic_fetch_add_explicit_u16 ( volatile uint16_t *  valuePtr,
uint16_t  arg,
mbed_memory_order  order 
)

◆ core_util_atomic_fetch_add_u32()

uint32_t core_util_atomic_fetch_add_u32 ( volatile uint32_t *  valuePtr,
uint32_t  arg 
)

Atomic add.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the addition.
Returns
The original value.

◆ core_util_atomic_fetch_add_explicit_u32()

MBED_FORCEINLINE uint32_t core_util_atomic_fetch_add_explicit_u32 ( volatile uint32_t *  valuePtr,
uint32_t  arg,
mbed_memory_order  order 
)

◆ core_util_atomic_fetch_add_u64()

uint64_t core_util_atomic_fetch_add_u64 ( volatile uint64_t *  valuePtr,
uint64_t  arg 
)

Atomic add.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the addition.
Returns
The original value.

◆ core_util_atomic_fetch_add_explicit_u64()

MBED_FORCEINLINE uint64_t core_util_atomic_fetch_add_explicit_u64 ( volatile uint64_t *  valuePtr,
uint64_t  arg,
mbed_memory_order  order 
)

◆ core_util_atomic_fetch_add_s8()

MBED_FORCEINLINE int8_t core_util_atomic_fetch_add_s8 ( volatile int8_t *  valuePtr,
int8_t  arg 
)

Atomic add.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the addition.
Returns
The original value.

Definition at line 883 of file mbed_atomic_impl.h.

◆ core_util_atomic_fetch_add_explicit_s8()

MBED_FORCEINLINE int8_t core_util_atomic_fetch_add_explicit_s8 ( volatile int8_t *  valuePtr,
int8_t  arg,
mbed_memory_order  order 
)

Definition at line 887 of file mbed_atomic_impl.h.

◆ core_util_atomic_fetch_add_s16()

MBED_FORCEINLINE int16_t core_util_atomic_fetch_add_s16 ( volatile int16_t *  valuePtr,
int16_t  arg 
)

Atomic add.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the addition.
Returns
The original value.

Definition at line 883 of file mbed_atomic_impl.h.

◆ core_util_atomic_fetch_add_explicit_s16()

MBED_FORCEINLINE int16_t core_util_atomic_fetch_add_explicit_s16 ( volatile int16_t *  valuePtr,
int16_t  arg,
mbed_memory_order  order 
)

Definition at line 887 of file mbed_atomic_impl.h.

◆ core_util_atomic_fetch_add_s32()

MBED_FORCEINLINE int32_t core_util_atomic_fetch_add_s32 ( volatile int32_t *  valuePtr,
int32_t  arg 
)

Atomic add.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the addition.
Returns
The original value.

Definition at line 883 of file mbed_atomic_impl.h.

◆ core_util_atomic_fetch_add_explicit_s32()

MBED_FORCEINLINE int32_t core_util_atomic_fetch_add_explicit_s32 ( volatile int32_t *  valuePtr,
int32_t  arg,
mbed_memory_order  order 
)

Definition at line 887 of file mbed_atomic_impl.h.

◆ core_util_atomic_fetch_add_s64()

MBED_FORCEINLINE int64_t core_util_atomic_fetch_add_s64 ( volatile int64_t *  valuePtr,
int64_t  arg 
)

Atomic add.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the addition.
Returns
The original value.

Definition at line 883 of file mbed_atomic_impl.h.

◆ core_util_atomic_fetch_add_explicit_s64()

MBED_FORCEINLINE int64_t core_util_atomic_fetch_add_explicit_s64 ( volatile int64_t *  valuePtr,
int64_t  arg,
mbed_memory_order  order 
)

Definition at line 887 of file mbed_atomic_impl.h.

◆ core_util_atomic_fetch_add_ptr()

MBED_FORCEINLINE void * core_util_atomic_fetch_add_ptr ( void *volatile *  valuePtr,
ptrdiff_t  arg 
)

Atomic add.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the addition.
Returns
The original value.

Definition at line 936 of file mbed_atomic_impl.h.

◆ core_util_atomic_fetch_add_explicit_ptr()

MBED_FORCEINLINE void * core_util_atomic_fetch_add_explicit_ptr ( void *volatile *  valuePtr,
ptrdiff_t  arg,
mbed_memory_order  order 
)

Definition at line 945 of file mbed_atomic_impl.h.

◆ core_util_atomic_fetch_sub_u8()

uint8_t core_util_atomic_fetch_sub_u8 ( volatile uint8_t *  valuePtr,
uint8_t  arg 
)

Atomic subtract.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the subtraction.
Returns
The original value.

◆ core_util_atomic_fetch_sub_explicit_u8()

MBED_FORCEINLINE uint8_t core_util_atomic_fetch_sub_explicit_u8 ( volatile uint8_t *  valuePtr,
uint8_t  arg,
mbed_memory_order  order 
)

Atomic subtract.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the subtraction.
Returns
The original value.
Parameters
ordermemory ordering constraint

◆ core_util_atomic_fetch_sub_u16()

uint16_t core_util_atomic_fetch_sub_u16 ( volatile uint16_t *  valuePtr,
uint16_t  arg 
)

Atomic subtract.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the subtraction.
Returns
The original value.

◆ core_util_atomic_fetch_sub_explicit_u16()

MBED_FORCEINLINE uint16_t core_util_atomic_fetch_sub_explicit_u16 ( volatile uint16_t *  valuePtr,
uint16_t  arg,
mbed_memory_order  order 
)

◆ core_util_atomic_fetch_sub_u32()

uint32_t core_util_atomic_fetch_sub_u32 ( volatile uint32_t *  valuePtr,
uint32_t  arg 
)

Atomic subtract.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the subtraction.
Returns
The original value.

◆ core_util_atomic_fetch_sub_explicit_u32()

MBED_FORCEINLINE uint32_t core_util_atomic_fetch_sub_explicit_u32 ( volatile uint32_t *  valuePtr,
uint32_t  arg,
mbed_memory_order  order 
)

◆ core_util_atomic_fetch_sub_u64()

uint64_t core_util_atomic_fetch_sub_u64 ( volatile uint64_t *  valuePtr,
uint64_t  arg 
)

Atomic subtract.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the subtraction.
Returns
The original value.

◆ core_util_atomic_fetch_sub_explicit_u64()

MBED_FORCEINLINE uint64_t core_util_atomic_fetch_sub_explicit_u64 ( volatile uint64_t *  valuePtr,
uint64_t  arg,
mbed_memory_order  order 
)

◆ core_util_atomic_fetch_sub_s8()

MBED_FORCEINLINE int8_t core_util_atomic_fetch_sub_s8 ( volatile int8_t *  valuePtr,
int8_t  arg 
)

Atomic subtract.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the subtraction.
Returns
The original value.

Definition at line 884 of file mbed_atomic_impl.h.

◆ core_util_atomic_fetch_sub_explicit_s8()

MBED_FORCEINLINE int8_t core_util_atomic_fetch_sub_explicit_s8 ( volatile int8_t *  valuePtr,
int8_t  arg,
mbed_memory_order  order 
)

Definition at line 888 of file mbed_atomic_impl.h.

◆ core_util_atomic_fetch_sub_s16()

MBED_FORCEINLINE int16_t core_util_atomic_fetch_sub_s16 ( volatile int16_t *  valuePtr,
int16_t  arg 
)

Atomic subtract.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the subtraction.
Returns
The original value.

Definition at line 884 of file mbed_atomic_impl.h.

◆ core_util_atomic_fetch_sub_explicit_s16()

MBED_FORCEINLINE int16_t core_util_atomic_fetch_sub_explicit_s16 ( volatile int16_t *  valuePtr,
int16_t  arg,
mbed_memory_order  order 
)

Definition at line 888 of file mbed_atomic_impl.h.

◆ core_util_atomic_fetch_sub_s32()

MBED_FORCEINLINE int32_t core_util_atomic_fetch_sub_s32 ( volatile int32_t *  valuePtr,
int32_t  arg 
)

Atomic subtract.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the subtraction.
Returns
The original value.

Definition at line 884 of file mbed_atomic_impl.h.

◆ core_util_atomic_fetch_sub_explicit_s32()

MBED_FORCEINLINE int32_t core_util_atomic_fetch_sub_explicit_s32 ( volatile int32_t *  valuePtr,
int32_t  arg,
mbed_memory_order  order 
)

Definition at line 888 of file mbed_atomic_impl.h.

◆ core_util_atomic_fetch_sub_s64()

MBED_FORCEINLINE int64_t core_util_atomic_fetch_sub_s64 ( volatile int64_t *  valuePtr,
int64_t  arg 
)

Atomic subtract.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the subtraction.
Returns
The original value.

Definition at line 884 of file mbed_atomic_impl.h.

◆ core_util_atomic_fetch_sub_explicit_s64()

MBED_FORCEINLINE int64_t core_util_atomic_fetch_sub_explicit_s64 ( volatile int64_t *  valuePtr,
int64_t  arg,
mbed_memory_order  order 
)

Definition at line 888 of file mbed_atomic_impl.h.

◆ core_util_atomic_fetch_sub_ptr()

MBED_FORCEINLINE void * core_util_atomic_fetch_sub_ptr ( void *volatile *  valuePtr,
ptrdiff_t  arg 
)

Atomic subtract.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the subtraction.
Returns
The original value.

Definition at line 954 of file mbed_atomic_impl.h.

◆ core_util_atomic_fetch_sub_explicit_ptr()

MBED_FORCEINLINE void * core_util_atomic_fetch_sub_explicit_ptr ( void *volatile *  valuePtr,
ptrdiff_t  arg,
mbed_memory_order  order 
)

Definition at line 963 of file mbed_atomic_impl.h.

◆ core_util_atomic_fetch_and_u8()

uint8_t core_util_atomic_fetch_and_u8 ( volatile uint8_t *  valuePtr,
uint8_t  arg 
)

Atomic bitwise and.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the bitwise operation.
Returns
The original value.

◆ core_util_atomic_fetch_and_explicit_u8()

MBED_FORCEINLINE uint8_t core_util_atomic_fetch_and_explicit_u8 ( volatile uint8_t *  valuePtr,
uint8_t  arg,
mbed_memory_order  order 
)

Atomic bitwise and.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the bitwise operation.
Returns
The original value.
Parameters
ordermemory ordering constraint

◆ core_util_atomic_fetch_and_u16()

uint16_t core_util_atomic_fetch_and_u16 ( volatile uint16_t *  valuePtr,
uint16_t  arg 
)

Atomic bitwise and.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the bitwise operation.
Returns
The original value.

◆ core_util_atomic_fetch_and_explicit_u16()

MBED_FORCEINLINE uint16_t core_util_atomic_fetch_and_explicit_u16 ( volatile uint16_t *  valuePtr,
uint16_t  arg,
mbed_memory_order  order 
)

◆ core_util_atomic_fetch_and_u32()

uint32_t core_util_atomic_fetch_and_u32 ( volatile uint32_t *  valuePtr,
uint32_t  arg 
)

Atomic bitwise and.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the bitwise operation.
Returns
The original value.

◆ core_util_atomic_fetch_and_explicit_u32()

MBED_FORCEINLINE uint32_t core_util_atomic_fetch_and_explicit_u32 ( volatile uint32_t *  valuePtr,
uint32_t  arg,
mbed_memory_order  order 
)

◆ core_util_atomic_fetch_and_u64()

uint64_t core_util_atomic_fetch_and_u64 ( volatile uint64_t *  valuePtr,
uint64_t  arg 
)

Atomic bitwise and.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the bitwise operation.
Returns
The original value.

◆ core_util_atomic_fetch_and_explicit_u64()

MBED_FORCEINLINE uint64_t core_util_atomic_fetch_and_explicit_u64 ( volatile uint64_t *  valuePtr,
uint64_t  arg,
mbed_memory_order  order 
)

◆ core_util_atomic_fetch_or_u8()

uint8_t core_util_atomic_fetch_or_u8 ( volatile uint8_t *  valuePtr,
uint8_t  arg 
)

Atomic bitwise inclusive or.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the bitwise operation.
Returns
The original value.

◆ core_util_atomic_fetch_or_explicit_u8()

MBED_FORCEINLINE uint8_t core_util_atomic_fetch_or_explicit_u8 ( volatile uint8_t *  valuePtr,
uint8_t  arg,
mbed_memory_order  order 
)

Atomic bitwise inclusive or.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the bitwise operation.
Returns
The original value.
Parameters
ordermemory ordering constraint

◆ core_util_atomic_fetch_or_u16()

uint16_t core_util_atomic_fetch_or_u16 ( volatile uint16_t *  valuePtr,
uint16_t  arg 
)

Atomic bitwise inclusive or.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the bitwise operation.
Returns
The original value.

◆ core_util_atomic_fetch_or_explicit_u16()

MBED_FORCEINLINE uint16_t core_util_atomic_fetch_or_explicit_u16 ( volatile uint16_t *  valuePtr,
uint16_t  arg,
mbed_memory_order  order 
)

◆ core_util_atomic_fetch_or_u32()

uint32_t core_util_atomic_fetch_or_u32 ( volatile uint32_t *  valuePtr,
uint32_t  arg 
)

Atomic bitwise inclusive or.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the bitwise operation.
Returns
The original value.

◆ core_util_atomic_fetch_or_explicit_u32()

MBED_FORCEINLINE uint32_t core_util_atomic_fetch_or_explicit_u32 ( volatile uint32_t *  valuePtr,
uint32_t  arg,
mbed_memory_order  order 
)

◆ core_util_atomic_fetch_or_u64()

uint64_t core_util_atomic_fetch_or_u64 ( volatile uint64_t *  valuePtr,
uint64_t  arg 
)

Atomic bitwise inclusive or.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the bitwise operation.
Returns
The original value.

◆ core_util_atomic_fetch_or_explicit_u64()

MBED_FORCEINLINE uint64_t core_util_atomic_fetch_or_explicit_u64 ( volatile uint64_t *  valuePtr,
uint64_t  arg,
mbed_memory_order  order 
)

◆ core_util_atomic_fetch_xor_u8()

uint8_t core_util_atomic_fetch_xor_u8 ( volatile uint8_t *  valuePtr,
uint8_t  arg 
)

Atomic bitwise exclusive or.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the bitwise operation.
Returns
The original value.

◆ core_util_atomic_fetch_xor_explicit_u8()

MBED_FORCEINLINE uint8_t core_util_atomic_fetch_xor_explicit_u8 ( volatile uint8_t *  valuePtr,
uint8_t  arg,
mbed_memory_order  order 
)

Atomic bitwise exclusive or.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the bitwise operation.
Returns
The original value.
Parameters
ordermemory ordering constraint

◆ core_util_atomic_fetch_xor_u16()

uint16_t core_util_atomic_fetch_xor_u16 ( volatile uint16_t *  valuePtr,
uint16_t  arg 
)

Atomic bitwise exclusive or.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the bitwise operation.
Returns
The original value.

◆ core_util_atomic_fetch_xor_explicit_u16()

MBED_FORCEINLINE uint16_t core_util_atomic_fetch_xor_explicit_u16 ( volatile uint16_t *  valuePtr,
uint16_t  arg,
mbed_memory_order  order 
)

◆ core_util_atomic_fetch_xor_u32()

uint32_t core_util_atomic_fetch_xor_u32 ( volatile uint32_t *  valuePtr,
uint32_t  arg 
)

Atomic bitwise exclusive or.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the bitwise operation.
Returns
The original value.

◆ core_util_atomic_fetch_xor_explicit_u32()

MBED_FORCEINLINE uint32_t core_util_atomic_fetch_xor_explicit_u32 ( volatile uint32_t *  valuePtr,
uint32_t  arg,
mbed_memory_order  order 
)

◆ core_util_atomic_fetch_xor_u64()

uint64_t core_util_atomic_fetch_xor_u64 ( volatile uint64_t *  valuePtr,
uint64_t  arg 
)

Atomic bitwise exclusive or.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the bitwise operation.
Returns
The original value.

◆ core_util_atomic_fetch_xor_explicit_u64()

MBED_FORCEINLINE uint64_t core_util_atomic_fetch_xor_explicit_u64 ( volatile uint64_t *  valuePtr,
uint64_t  arg,
mbed_memory_order  order 
)

◆ core_util_atomic_load() [1/4]

T core_util_atomic_load ( const volatile T *  valuePtr)
noexcept

◆ core_util_atomic_load() [2/4]

T core_util_atomic_load ( const T *  valuePtr)
noexcept

◆ core_util_atomic_store() [1/4]

void core_util_atomic_store ( volatile T *  valuePtr,
mstd::type_identity_t< T >  desiredValue 
)
noexcept

◆ core_util_atomic_store() [2/4]

void core_util_atomic_store ( T *  valuePtr,
mstd::type_identity_t< T >  desiredValue 
)
noexcept

◆ core_util_atomic_exchange() [1/2]

T core_util_atomic_exchange ( volatile T *  valuePtr,
mstd::type_identity_t< T >  desiredValue 
)
noexcept

Atomic exchange.

Parameters
valuePtrTarget memory location.
desiredValueThe value to store.
Returns
The previous value.

◆ core_util_atomic_compare_exchange_strong() [1/2]

bool core_util_atomic_compare_exchange_strong ( volatile T *  ptr,
mstd::type_identity_t< T > *  expectedCurrentValue,
mstd::type_identity_t< T >  desiredValue 
)
noexcept

Atomic compare and set.

It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.

Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].

Parameters
ptrThe target memory location.
[in,out]expectedCurrentValueA pointer to some location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value.
Note
: This is an in-out parameter. In the failure case of atomic_cas (where the destination isn't set), the pointee of expectedCurrentValue is updated with the current value.
Parameters
[in]desiredValueThe new value computed based on '*expectedCurrentValue'.
Returns
true if the memory location was atomically updated with the desired value (after verifying that it contained the expectedCurrentValue), false otherwise. In the failure case, expectedCurrentValue is updated with the new value of the target memory location.

pseudocode: function cas(p : pointer to int, old : pointer to int, new : int) returns bool { if *p != *old { *old = *p return false } *p = new return true }

Note
: In the failure case (where the destination isn't set), the value pointed to by expectedCurrentValue is instead updated with the current value. This property helps writing concise code for the following incr:

function incr(p : pointer to int, a : int) returns int { done = false value = atomic_load(p) while not done { done = atomic_cas(p, &value, value + a) // *value gets updated automatically until success } return value + a }

However, if the call is made in a loop like this, the atomic_compare_exchange_weak functions are to be preferred.

Note
: This corresponds to the C11 "atomic_compare_exchange_strong" - it always succeeds if the current value is expected, as per the pseudocode above; it will not spuriously fail as "atomic_compare_exchange_weak" may. This call would normally be used when a fail return does not retry.

◆ core_util_atomic_compare_exchange_weak() [1/2]

bool core_util_atomic_compare_exchange_weak ( volatile T *  ptr,
mstd::type_identity_t< T > *  expectedCurrentValue,
mstd::type_identity_t< T >  desiredValue 
)
noexcept

Atomic compare and set.

It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.

Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].

Parameters
ptrThe target memory location.
[in,out]expectedCurrentValueA pointer to some location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value.
Note
: This is an in-out parameter. In the failure case of atomic_cas (where the destination isn't set), the pointee of expectedCurrentValue is updated with the current value.
Parameters
[in]desiredValueThe new value computed based on '*expectedCurrentValue'.
Returns
true if the memory location was atomically updated with the desired value (after verifying that it contained the expectedCurrentValue), false otherwise. In the failure case, expectedCurrentValue is updated with the new value of the target memory location.

pseudocode: function cas(p : pointer to int, old : pointer to int, new : int) returns bool { if *p != *old or spurious failure { *old = *p return false } *p = new return true }

Note
: In the failure case (where the destination isn't set), the value pointed to by expectedCurrentValue is instead updated with the current value. This property helps writing concise code for the following incr:

function incr(p : pointer to int, a : int) returns int { done = false value = *p // This fetch operation need not be atomic. while not done { done = atomic_compare_exchange_weak(p, &value, value + a) // *value gets updated automatically until success } return value + a }

Note
: This corresponds to the C11 "atomic_compare_exchange_weak" - it may spuriously fail even if the current value is the expected one, as per the pseudocode above; unlike "atomic_compare_exchange_strong", which never fails spuriously. This call would normally be used when a fail return will cause a retry anyway, saving the need for an extra loop inside the cas operation.

◆ core_util_atomic_fetch_add() [1/2]

T core_util_atomic_fetch_add ( volatile T *  valuePtr,
mstd::type_identity_t< T >  arg 
)
noexcept

Atomic add.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the addition.
Returns
The original value.

◆ core_util_atomic_fetch_sub() [1/2]

T core_util_atomic_fetch_sub ( volatile T *  valuePtr,
mstd::type_identity_t< T >  arg 
)
noexcept

Atomic subtract.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the subtraction.
Returns
The original value.

◆ core_util_atomic_fetch_and()

T core_util_atomic_fetch_and ( volatile T *  valuePtr,
mstd::type_identity_t< T >  arg 
)
noexcept

Atomic bitwise and.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the bitwise operation.
Returns
The original value.

◆ core_util_atomic_fetch_or()

T core_util_atomic_fetch_or ( volatile T *  valuePtr,
mstd::type_identity_t< T >  arg 
)
noexcept

Atomic bitwise inclusive or.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the bitwise operation.
Returns
The original value.

◆ core_util_atomic_fetch_xor()

T core_util_atomic_fetch_xor ( volatile T *  valuePtr,
mstd::type_identity_t< T >  arg 
)
noexcept

Atomic bitwise exclusive or.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the bitwise operation.
Returns
The original value.

◆ core_util_atomic_load_explicit() [1/4]

T core_util_atomic_load_explicit ( const volatile T *  valuePtr,
mbed_memory_order  order 
)
noexcept

◆ core_util_atomic_load_explicit() [2/4]

T core_util_atomic_load_explicit ( const T *  valuePtr,
mbed_memory_order  order 
)
noexcept

◆ core_util_atomic_store_explicit() [1/4]

void core_util_atomic_store_explicit ( volatile T *  valuePtr,
mstd::type_identity_t< T >  desiredValue,
mbed_memory_order  order 
)
noexcept

◆ core_util_atomic_store_explicit() [2/4]

void core_util_atomic_store_explicit ( T *  valuePtr,
mstd::type_identity_t< T >  desiredValue,
mbed_memory_order  order 
)
noexcept

◆ core_util_atomic_exchange_explicit() [1/2]

T core_util_atomic_exchange_explicit ( volatile T *  valuePtr,
mstd::type_identity_t< T >  desiredValue,
mbed_memory_order  order 
)
noexcept

◆ core_util_atomic_compare_exchange_strong_explicit() [1/2]

bool core_util_atomic_compare_exchange_strong_explicit ( volatile T *  ptr,
mstd::type_identity_t< T > *  expectedCurrentValue,
mstd::type_identity_t< T >  desiredValue,
mbed_memory_order  success,
mbed_memory_order  failure 
)
noexcept

◆ core_util_atomic_compare_exchange_weak_explicit() [1/2]

bool core_util_atomic_compare_exchange_weak_explicit ( volatile T *  ptr,
mstd::type_identity_t< T > *  expectedCurrentValue,
mstd::type_identity_t< T >  desiredValue,
mbed_memory_order  success,
mbed_memory_order  failure 
)
noexcept

◆ core_util_atomic_fetch_add_explicit() [1/2]

T core_util_atomic_fetch_add_explicit ( volatile T *  valuePtr,
mstd::type_identity_t< T >  arg,
mbed_memory_order  order 
)
noexcept

◆ core_util_atomic_fetch_sub_explicit() [1/2]

T core_util_atomic_fetch_sub_explicit ( volatile T *  valuePtr,
mstd::type_identity_t< T >  arg,
mbed_memory_order  order 
)
noexcept

◆ core_util_atomic_fetch_and_explicit()

T core_util_atomic_fetch_and_explicit ( volatile T *  valuePtr,
mstd::type_identity_t< T >  arg,
mbed_memory_order  order 
)
noexcept

◆ core_util_atomic_fetch_or_explicit()

T core_util_atomic_fetch_or_explicit ( volatile T *  valuePtr,
mstd::type_identity_t< T >  arg,
mbed_memory_order  order 
)
noexcept

◆ core_util_atomic_fetch_xor_explicit()

T core_util_atomic_fetch_xor_explicit ( volatile T *  valuePtr,
mstd::type_identity_t< T >  arg,
mbed_memory_order  order 
)
noexcept

◆ core_util_atomic_load() [3/4]

T * core_util_atomic_load ( T *const volatile *  valuePtr)
noexcept

Definition at line 1060 of file mbed_atomic_impl.h.

◆ core_util_atomic_load() [4/4]

T * core_util_atomic_load ( T *const *  valuePtr)
noexcept

Definition at line 1066 of file mbed_atomic_impl.h.

◆ core_util_atomic_store() [3/4]

void core_util_atomic_store ( T *volatile *  valuePtr,
mstd::type_identity_t< T > *  desiredValue 
)
noexcept

◆ core_util_atomic_store() [4/4]

void core_util_atomic_store ( T **  valuePtr,
mstd::type_identity_t< T > *  desiredValue 
)
noexcept

◆ core_util_atomic_exchange() [2/2]

T * core_util_atomic_exchange ( T *volatile *  valuePtr,
mstd::type_identity_t< T > *  desiredValue 
)
noexcept

Atomic exchange.

Parameters
valuePtrTarget memory location.
desiredValueThe value to store.
Returns
The previous value.

◆ core_util_atomic_compare_exchange_strong() [2/2]

bool core_util_atomic_compare_exchange_strong ( T *volatile *  ptr,
mstd::type_identity_t< T > **  expectedCurrentValue,
mstd::type_identity_t< T > *  desiredValue 
)
noexcept

Atomic compare and set.

It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.

Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].

Parameters
ptrThe target memory location.
[in,out]expectedCurrentValueA pointer to some location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value.
Note
: This is an in-out parameter. In the failure case of atomic_cas (where the destination isn't set), the pointee of expectedCurrentValue is updated with the current value.
Parameters
[in]desiredValueThe new value computed based on '*expectedCurrentValue'.
Returns
true if the memory location was atomically updated with the desired value (after verifying that it contained the expectedCurrentValue), false otherwise. In the failure case, expectedCurrentValue is updated with the new value of the target memory location.

pseudocode: function cas(p : pointer to int, old : pointer to int, new : int) returns bool { if *p != *old { *old = *p return false } *p = new return true }

Note
: In the failure case (where the destination isn't set), the value pointed to by expectedCurrentValue is instead updated with the current value. This property helps writing concise code for the following incr:

function incr(p : pointer to int, a : int) returns int { done = false value = atomic_load(p) while not done { done = atomic_cas(p, &value, value + a) // *value gets updated automatically until success } return value + a }

However, if the call is made in a loop like this, the atomic_compare_exchange_weak functions are to be preferred.

Note
: This corresponds to the C11 "atomic_compare_exchange_strong" - it always succeeds if the current value is expected, as per the pseudocode above; it will not spuriously fail as "atomic_compare_exchange_weak" may. This call would normally be used when a fail return does not retry.

◆ core_util_atomic_compare_exchange_weak() [2/2]

bool core_util_atomic_compare_exchange_weak ( T *volatile *  ptr,
mstd::type_identity_t< T > **  expectedCurrentValue,
mstd::type_identity_t< T > *  desiredValue 
)
noexcept

Atomic compare and set.

It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.

Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].

Parameters
ptrThe target memory location.
[in,out]expectedCurrentValueA pointer to some location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value.
Note
: This is an in-out parameter. In the failure case of atomic_cas (where the destination isn't set), the pointee of expectedCurrentValue is updated with the current value.
Parameters
[in]desiredValueThe new value computed based on '*expectedCurrentValue'.
Returns
true if the memory location was atomically updated with the desired value (after verifying that it contained the expectedCurrentValue), false otherwise. In the failure case, expectedCurrentValue is updated with the new value of the target memory location.

pseudocode: function cas(p : pointer to int, old : pointer to int, new : int) returns bool { if *p != *old or spurious failure { *old = *p return false } *p = new return true }

Note
: In the failure case (where the destination isn't set), the value pointed to by expectedCurrentValue is instead updated with the current value. This property helps writing concise code for the following incr:

function incr(p : pointer to int, a : int) returns int { done = false value = *p // This fetch operation need not be atomic. while not done { done = atomic_compare_exchange_weak(p, &value, value + a) // *value gets updated automatically until success } return value + a }

Note
: This corresponds to the C11 "atomic_compare_exchange_weak" - it may spuriously fail even if the current value is the expected one, as per the pseudocode above; unlike "atomic_compare_exchange_strong", which never fails spuriously. This call would normally be used when a fail return will cause a retry anyway, saving the need for an extra loop inside the cas operation.

◆ core_util_atomic_fetch_add() [2/2]

T * core_util_atomic_fetch_add ( T *volatile *  valuePtr,
ptrdiff_t  arg 
)
noexcept

Atomic add.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the addition.
Returns
The original value.

Definition at line 1225 of file mbed_atomic_impl.h.

◆ core_util_atomic_fetch_sub() [2/2]

T * core_util_atomic_fetch_sub ( T *volatile *  valuePtr,
ptrdiff_t  arg 
)
noexcept

Atomic subtract.

Parameters
valuePtrTarget memory location being modified.
argThe argument for the subtraction.
Returns
The original value.

Definition at line 1237 of file mbed_atomic_impl.h.

◆ core_util_atomic_load_explicit() [3/4]

T * core_util_atomic_load_explicit ( T *const volatile *  valuePtr,
mbed_memory_order  order 
)
noexcept

Definition at line 1072 of file mbed_atomic_impl.h.

◆ core_util_atomic_load_explicit() [4/4]

T * core_util_atomic_load_explicit ( T *const *  valuePtr,
mbed_memory_order  order 
)
noexcept

Definition at line 1078 of file mbed_atomic_impl.h.

◆ core_util_atomic_store_explicit() [3/4]

void core_util_atomic_store_explicit ( T *volatile *  valuePtr,
mstd::type_identity_t< T > *  desiredValue,
mbed_memory_order  order 
)
noexcept

◆ core_util_atomic_store_explicit() [4/4]

void core_util_atomic_store_explicit ( T **  valuePtr,
mstd::type_identity_t< T > *  desiredValue,
mbed_memory_order  order 
)
noexcept

◆ core_util_atomic_exchange_explicit() [2/2]

T * core_util_atomic_exchange_explicit ( T *volatile *  valuePtr,
mstd::type_identity_t< T > *  desiredValue,
mbed_memory_order  order 
)
noexcept

◆ core_util_atomic_compare_exchange_strong_explicit() [2/2]

bool core_util_atomic_compare_exchange_strong_explicit ( T *volatile *  ptr,
mstd::type_identity_t< T > **  expectedCurrentValue,
mstd::type_identity_t< T > *  desiredValue,
mbed_memory_order  success,
mbed_memory_order  failure 
)
noexcept

◆ core_util_atomic_compare_exchange_weak_explicit() [2/2]

bool core_util_atomic_compare_exchange_weak_explicit ( T *volatile *  ptr,
mstd::type_identity_t< T > **  expectedCurrentValue,
mstd::type_identity_t< T > *  desiredValue,
mbed_memory_order  success,
mbed_memory_order  failure 
)
noexcept

◆ core_util_atomic_fetch_add_explicit() [2/2]

T * core_util_atomic_fetch_add_explicit ( T *volatile *  valuePtr,
ptrdiff_t  arg,
mbed_memory_order  order 
)
noexcept

Definition at line 1231 of file mbed_atomic_impl.h.

◆ core_util_atomic_fetch_sub_explicit() [2/2]

T * core_util_atomic_fetch_sub_explicit ( T *volatile *  valuePtr,
ptrdiff_t  arg,
mbed_memory_order  order 
)
noexcept

Definition at line 1243 of file mbed_atomic_impl.h.