Mbed OS Reference
Atomic functions behave analogously to C11 and C++11 atomics - loads have acquire semantics, stores have release semantics, and atomic read-modify-write operations are sequentially consistent. More...
Data Structures | |
struct | core_util_atomic_flag |
A lock-free, primitive atomic flag. More... | |
Macros | |
#define | CORE_UTIL_ATOMIC_FLAG_INIT { 0 } |
Initializer for a core_util_atomic_flag. More... | |
Typedefs | |
typedef enum mbed_memory_order | mbed_memory_order |
Memory order constraints for atomic operations. More... | |
typedef struct core_util_atomic_flag | core_util_atomic_flag |
A lock-free, primitive atomic flag. More... | |
Enumerations | |
enum | mbed_memory_order |
Memory order constraints for atomic operations. More... | |
Functions | |
bool | core_util_atomic_flag_test_and_set (volatile core_util_atomic_flag *flagPtr) |
Atomic test and set. More... | |
MBED_FORCEINLINE bool | core_util_atomic_flag_test_and_set_explicit (volatile core_util_atomic_flag *flagPtr, mbed_memory_order order) |
Atomic test and set. More... | |
MBED_FORCEINLINE void | core_util_atomic_flag_clear (volatile core_util_atomic_flag *flagPtr) |
Atomic clear. More... | |
MBED_FORCEINLINE void | core_util_atomic_flag_clear_explicit (volatile core_util_atomic_flag *flagPtr, mbed_memory_order order) |
Atomic clear. More... | |
bool | core_util_atomic_cas_u8 (volatile uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue) |
Atomic compare and set. More... | |
MBED_FORCEINLINE bool | core_util_atomic_cas_explicit_u8 (volatile uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue, mbed_memory_order success, mbed_memory_order failure) |
Atomic compare and set. More... | |
bool | core_util_atomic_cas_u16 (volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue) |
Atomic compare and set. More... | |
MBED_FORCEINLINE bool | core_util_atomic_cas_explicit_u16 (volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue, mbed_memory_order success, mbed_memory_order failure) |
bool | core_util_atomic_cas_u32 (volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue) |
Atomic compare and set. More... | |
MBED_FORCEINLINE bool | core_util_atomic_cas_explicit_u32 (volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue, mbed_memory_order success, mbed_memory_order failure) |
bool | core_util_atomic_cas_u64 (volatile uint64_t *ptr, uint64_t *expectedCurrentValue, uint64_t desiredValue) |
Atomic compare and set. More... | |
MBED_FORCEINLINE bool | core_util_atomic_cas_explicit_u64 (volatile uint64_t *ptr, uint64_t *expectedCurrentValue, uint64_t desiredValue, mbed_memory_order success, mbed_memory_order failure) |
MBED_FORCEINLINE bool | core_util_atomic_cas_s8 (volatile int8_t *ptr, int8_t *expectedCurrentValue, int8_t desiredValue) |
Atomic compare and set. More... | |
MBED_FORCEINLINE bool | core_util_atomic_cas_explicit_s8 (volatile int8_t *ptr, int8_t *expectedCurrentValue, int8_t desiredValue, mbed_memory_order success, mbed_memory_order failure) |
MBED_FORCEINLINE bool | core_util_atomic_cas_s16 (volatile int16_t *ptr, int16_t *expectedCurrentValue, int16_t desiredValue) |
Atomic compare and set. More... | |
MBED_FORCEINLINE bool | core_util_atomic_cas_explicit_s16 (volatile int16_t *ptr, int16_t *expectedCurrentValue, int16_t desiredValue, mbed_memory_order success, mbed_memory_order failure) |
MBED_FORCEINLINE bool | core_util_atomic_cas_s32 (volatile int32_t *ptr, int32_t *expectedCurrentValue, int32_t desiredValue) |
Atomic compare and set. More... | |
MBED_FORCEINLINE bool | core_util_atomic_cas_explicit_s32 (volatile int32_t *ptr, int32_t *expectedCurrentValue, int32_t desiredValue, mbed_memory_order success, mbed_memory_order failure) |
MBED_FORCEINLINE bool | core_util_atomic_cas_s64 (volatile int64_t *ptr, int64_t *expectedCurrentValue, int64_t desiredValue) |
Atomic compare and set. More... | |
MBED_FORCEINLINE bool | core_util_atomic_cas_explicit_s64 (volatile int64_t *ptr, int64_t *expectedCurrentValue, int64_t desiredValue, mbed_memory_order success, mbed_memory_order failure) |
MBED_FORCEINLINE bool | core_util_atomic_cas_bool (volatile bool *ptr, bool *expectedCurrentValue, bool desiredValue) |
Atomic compare and set. More... | |
MBED_FORCEINLINE bool | core_util_atomic_cas_explicit_bool (volatile bool *ptr, bool *expectedCurrentValue, bool desiredValue, mbed_memory_order success, mbed_memory_order failure) |
bool | core_util_atomic_cas_ptr (void *volatile *ptr, void **expectedCurrentValue, void *desiredValue) |
Atomic compare and set. More... | |
MBED_FORCEINLINE bool | core_util_atomic_cas_explicit_ptr (void *volatile *ptr, void **expectedCurrentValue, void *desiredValue, mbed_memory_order success, mbed_memory_order failure) |
bool | core_util_atomic_compare_exchange_weak_u8 (volatile uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue) |
Atomic compare and set. More... | |
MBED_FORCEINLINE bool | core_util_atomic_compare_exchange_weak_explicit_u8 (volatile uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue, mbed_memory_order success, mbed_memory_order failure) |
Atomic compare and set. More... | |
bool | core_util_atomic_compare_exchange_weak_u16 (volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue) |
Atomic compare and set. More... | |
MBED_FORCEINLINE bool | core_util_atomic_compare_exchange_weak_explicit_u16 (volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue, mbed_memory_order success, mbed_memory_order failure) |
bool | core_util_atomic_compare_exchange_weak_u32 (volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue) |
Atomic compare and set. More... | |
MBED_FORCEINLINE bool | core_util_atomic_compare_exchange_weak_explicit_u32 (volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue, mbed_memory_order success, mbed_memory_order failure) |
bool | core_util_atomic_compare_exchange_weak_u64 (volatile uint64_t *ptr, uint64_t *expectedCurrentValue, uint64_t desiredValue) |
Atomic compare and set. More... | |
MBED_FORCEINLINE bool | core_util_atomic_compare_exchange_weak_explicit_u64 (volatile uint64_t *ptr, uint64_t *expectedCurrentValue, uint64_t desiredValue, mbed_memory_order success, mbed_memory_order failure) |
MBED_FORCEINLINE bool | core_util_atomic_compare_exchange_weak_s8 (volatile int8_t *ptr, int8_t *expectedCurrentValue, int8_t desiredValue) |
Atomic compare and set. More... | |
MBED_FORCEINLINE bool | core_util_atomic_compare_exchange_weak_explicit_s8 (volatile int8_t *ptr, int8_t *expectedCurrentValue, int8_t desiredValue, mbed_memory_order success, mbed_memory_order failure) |
MBED_FORCEINLINE bool | core_util_atomic_compare_exchange_weak_s16 (volatile int16_t *ptr, int16_t *expectedCurrentValue, int16_t desiredValue) |
Atomic compare and set. More... | |
MBED_FORCEINLINE bool | core_util_atomic_compare_exchange_weak_explicit_s16 (volatile int16_t *ptr, int16_t *expectedCurrentValue, int16_t desiredValue, mbed_memory_order success, mbed_memory_order failure) |
MBED_FORCEINLINE bool | core_util_atomic_compare_exchange_weak_s32 (volatile int32_t *ptr, int32_t *expectedCurrentValue, int32_t desiredValue) |
Atomic compare and set. More... | |
MBED_FORCEINLINE bool | core_util_atomic_compare_exchange_weak_explicit_s32 (volatile int32_t *ptr, int32_t *expectedCurrentValue, int32_t desiredValue, mbed_memory_order success, mbed_memory_order failure) |
MBED_FORCEINLINE bool | core_util_atomic_compare_exchange_weak_s64 (volatile int64_t *ptr, int64_t *expectedCurrentValue, int64_t desiredValue) |
Atomic compare and set. More... | |
MBED_FORCEINLINE bool | core_util_atomic_compare_exchange_weak_explicit_s64 (volatile int64_t *ptr, int64_t *expectedCurrentValue, int64_t desiredValue, mbed_memory_order success, mbed_memory_order failure) |
MBED_FORCEINLINE bool | core_util_atomic_compare_exchange_weak_bool (volatile bool *ptr, bool *expectedCurrentValue, bool desiredValue) |
Atomic compare and set. More... | |
MBED_FORCEINLINE bool | core_util_atomic_compare_exchange_weak_explicit_bool (volatile bool *ptr, bool *expectedCurrentValue, bool desiredValue, mbed_memory_order success, mbed_memory_order failure) |
MBED_FORCEINLINE bool | core_util_atomic_compare_exchange_weak_ptr (void *volatile *ptr, void **expectedCurrentValue, void *desiredValue) |
Atomic compare and set. More... | |
MBED_FORCEINLINE bool | core_util_atomic_compare_exchange_weak_explicit_ptr (void *volatile *ptr, void **expectedCurrentValue, void *desiredValue, mbed_memory_order success, mbed_memory_order failure) |
MBED_FORCEINLINE uint8_t | core_util_atomic_load_u8 (const volatile uint8_t *valuePtr) |
Atomic load. More... | |
MBED_FORCEINLINE uint8_t | core_util_atomic_load_explicit_u8 (const volatile uint8_t *valuePtr, mbed_memory_order order) |
Atomic load with explicit ordering. More... | |
MBED_FORCEINLINE uint16_t | core_util_atomic_load_u16 (const volatile uint16_t *valuePtr) |
MBED_FORCEINLINE uint16_t | core_util_atomic_load_explicit_u16 (const volatile uint16_t *valuePtr, mbed_memory_order order) |
MBED_FORCEINLINE uint32_t | core_util_atomic_load_u32 (const volatile uint32_t *valuePtr) |
MBED_FORCEINLINE uint32_t | core_util_atomic_load_explicit_u32 (const volatile uint32_t *valuePtr, mbed_memory_order order) |
uint64_t | core_util_atomic_load_u64 (const volatile uint64_t *valuePtr) |
MBED_FORCEINLINE uint64_t | core_util_atomic_load_explicit_u64 (const volatile uint64_t *valuePtr, mbed_memory_order order) |
MBED_FORCEINLINE int8_t | core_util_atomic_load_s8 (const volatile int8_t *valuePtr) |
MBED_FORCEINLINE int8_t | core_util_atomic_load_explicit_s8 (const volatile int8_t *valuePtr, mbed_memory_order order) |
MBED_FORCEINLINE int16_t | core_util_atomic_load_s16 (const volatile int16_t *valuePtr) |
MBED_FORCEINLINE int16_t | core_util_atomic_load_explicit_s16 (const volatile int16_t *valuePtr, mbed_memory_order order) |
MBED_FORCEINLINE int32_t | core_util_atomic_load_s32 (const volatile int32_t *valuePtr) |
MBED_FORCEINLINE int32_t | core_util_atomic_load_explicit_s32 (const volatile int32_t *valuePtr, mbed_memory_order order) |
MBED_FORCEINLINE int64_t | core_util_atomic_load_s64 (const volatile int64_t *valuePtr) |
MBED_FORCEINLINE int64_t | core_util_atomic_load_explicit_s64 (const volatile int64_t *valuePtr, MBED_UNUSED mbed_memory_order order) |
Atomic load with explicit ordering. More... | |
MBED_FORCEINLINE bool | core_util_atomic_load_bool (const volatile bool *valuePtr) |
MBED_FORCEINLINE bool | core_util_atomic_load_explicit_bool (const volatile bool *valuePtr, mbed_memory_order order) |
MBED_FORCEINLINE void * | core_util_atomic_load_ptr (void *const volatile *valuePtr) |
MBED_FORCEINLINE void * | core_util_atomic_load_explicit_ptr (void *const volatile *valuePtr, mbed_memory_order order) |
MBED_FORCEINLINE void | core_util_atomic_store_u8 (volatile uint8_t *valuePtr, uint8_t desiredValue) |
Atomic store. More... | |
MBED_FORCEINLINE void | core_util_atomic_store_explicit_u8 (volatile uint8_t *valuePtr, uint8_t desiredValue, mbed_memory_order order) |
Atomic store with explicit ordering. More... | |
MBED_FORCEINLINE void | core_util_atomic_store_u16 (volatile uint16_t *valuePtr, uint16_t desiredValue) |
MBED_FORCEINLINE void | core_util_atomic_store_explicit_u16 (volatile uint16_t *valuePtr, uint16_t desiredValue, mbed_memory_order order) |
MBED_FORCEINLINE void | core_util_atomic_store_u32 (volatile uint32_t *valuePtr, uint32_t desiredValue) |
MBED_FORCEINLINE void | core_util_atomic_store_explicit_u32 (volatile uint32_t *valuePtr, uint32_t desiredValue, mbed_memory_order order) |
void | core_util_atomic_store_u64 (volatile uint64_t *valuePtr, uint64_t desiredValue) |
MBED_FORCEINLINE void | core_util_atomic_store_explicit_u64 (volatile uint64_t *valuePtr, uint64_t desiredValue, mbed_memory_order order) |
MBED_FORCEINLINE void | core_util_atomic_store_s8 (volatile int8_t *valuePtr, int8_t desiredValue) |
MBED_FORCEINLINE void | core_util_atomic_store_explicit_s8 (volatile int8_t *valuePtr, int8_t desiredValue, mbed_memory_order order) |
MBED_FORCEINLINE void | core_util_atomic_store_s16 (volatile int16_t *valuePtr, int16_t desiredValue) |
MBED_FORCEINLINE void | core_util_atomic_store_explicit_s16 (volatile int16_t *valuePtr, int16_t desiredValue, mbed_memory_order order) |
MBED_FORCEINLINE void | core_util_atomic_store_s32 (volatile int32_t *valuePtr, int32_t desiredValue) |
MBED_FORCEINLINE void | core_util_atomic_store_explicit_s32 (volatile int32_t *valuePtr, int32_t desiredValue, mbed_memory_order order) |
MBED_FORCEINLINE void | core_util_atomic_store_s64 (volatile int64_t *valuePtr, int64_t desiredValue) |
MBED_FORCEINLINE void | core_util_atomic_store_explicit_s64 (volatile int64_t *valuePtr, int64_t desiredValue, mbed_memory_order order) |
MBED_FORCEINLINE void | core_util_atomic_store_bool (volatile bool *valuePtr, bool desiredValue) |
MBED_FORCEINLINE void | core_util_atomic_store_explicit_bool (volatile bool *valuePtr, bool desiredValue, mbed_memory_order order) |
MBED_FORCEINLINE void | core_util_atomic_store_ptr (void *volatile *valuePtr, void *desiredValue) |
MBED_FORCEINLINE void | core_util_atomic_store_explicit_ptr (void *volatile *valuePtr, void *desiredValue, mbed_memory_order order) |
uint8_t | core_util_atomic_exchange_u8 (volatile uint8_t *valuePtr, uint8_t desiredValue) |
Atomic exchange. More... | |
MBED_FORCEINLINE uint8_t | core_util_atomic_exchange_explicit_u8 (volatile uint8_t *valuePtr, uint8_t desiredValue, mbed_memory_order order) |
Atomic exchange. More... | |
uint16_t | core_util_atomic_exchange_u16 (volatile uint16_t *valuePtr, uint16_t desiredValue) |
Atomic exchange. More... | |
MBED_FORCEINLINE uint16_t | core_util_atomic_exchange_explicit_u16 (volatile uint16_t *valuePtr, uint16_t desiredValue, mbed_memory_order order) |
uint32_t | core_util_atomic_exchange_u32 (volatile uint32_t *valuePtr, uint32_t desiredValue) |
Atomic exchange. More... | |
MBED_FORCEINLINE uint32_t | core_util_atomic_exchange_explicit_u32 (volatile uint32_t *valuePtr, uint32_t desiredValue, mbed_memory_order order) |
uint64_t | core_util_atomic_exchange_u64 (volatile uint64_t *valuePtr, uint64_t desiredValue) |
Atomic exchange. More... | |
MBED_FORCEINLINE uint64_t | core_util_atomic_exchange_explicit_u64 (volatile uint64_t *valuePtr, uint64_t desiredValue, mbed_memory_order order) |
MBED_FORCEINLINE int8_t | core_util_atomic_exchange_s8 (volatile int8_t *valuePtr, int8_t desiredValue) |
Atomic exchange. More... | |
MBED_FORCEINLINE int8_t | core_util_atomic_exchange_explicit_s8 (volatile int8_t *valuePtr, int8_t desiredValue, mbed_memory_order order) |
MBED_FORCEINLINE int16_t | core_util_atomic_exchange_s16 (volatile int16_t *valuePtr, int16_t desiredValue) |
Atomic exchange. More... | |
MBED_FORCEINLINE int16_t | core_util_atomic_exchange_explicit_s16 (volatile int16_t *valuePtr, int16_t desiredValue, mbed_memory_order order) |
MBED_FORCEINLINE int32_t | core_util_atomic_exchange_s32 (volatile int32_t *valuePtr, int32_t desiredValue) |
Atomic exchange. More... | |
MBED_FORCEINLINE int32_t | core_util_atomic_exchange_explicit_s32 (volatile int32_t *valuePtr, int32_t desiredValue, mbed_memory_order order) |
MBED_FORCEINLINE int64_t | core_util_atomic_exchange_s64 (volatile int64_t *valuePtr, int64_t desiredValue) |
Atomic exchange. More... | |
MBED_FORCEINLINE int64_t | core_util_atomic_exchange_explicit_s64 (volatile int64_t *valuePtr, int64_t desiredValue, mbed_memory_order order) |
MBED_FORCEINLINE bool | core_util_atomic_exchange_bool (volatile bool *valuePtr, bool desiredValue) |
Atomic exchange. More... | |
MBED_FORCEINLINE bool | core_util_atomic_exchange_explicit_bool (volatile bool *valuePtr, bool desiredValue, mbed_memory_order order) |
void * | core_util_atomic_exchange_ptr (void *volatile *valuePtr, void *desiredValue) |
Atomic exchange. More... | |
MBED_FORCEINLINE void * | core_util_atomic_exchange_explicit_ptr (void *volatile *valuePtr, void *desiredValue, mbed_memory_order order) |
uint8_t | core_util_atomic_incr_u8 (volatile uint8_t *valuePtr, uint8_t delta) |
Atomic increment. More... | |
uint16_t | core_util_atomic_incr_u16 (volatile uint16_t *valuePtr, uint16_t delta) |
Atomic increment. More... | |
uint32_t | core_util_atomic_incr_u32 (volatile uint32_t *valuePtr, uint32_t delta) |
Atomic increment. More... | |
uint64_t | core_util_atomic_incr_u64 (volatile uint64_t *valuePtr, uint64_t delta) |
Atomic increment. More... | |
MBED_FORCEINLINE int8_t | core_util_atomic_incr_s8 (volatile int8_t *valuePtr, int8_t delta) |
Atomic increment. More... | |
MBED_FORCEINLINE int16_t | core_util_atomic_incr_s16 (volatile int16_t *valuePtr, int16_t delta) |
Atomic increment. More... | |
MBED_FORCEINLINE int32_t | core_util_atomic_incr_s32 (volatile int32_t *valuePtr, int32_t delta) |
Atomic increment. More... | |
MBED_FORCEINLINE int64_t | core_util_atomic_incr_s64 (volatile int64_t *valuePtr, int64_t delta) |
Atomic increment. More... | |
void * | core_util_atomic_incr_ptr (void *volatile *valuePtr, ptrdiff_t delta) |
Atomic increment. More... | |
uint8_t | core_util_atomic_decr_u8 (volatile uint8_t *valuePtr, uint8_t delta) |
Atomic decrement. More... | |
uint16_t | core_util_atomic_decr_u16 (volatile uint16_t *valuePtr, uint16_t delta) |
Atomic decrement. More... | |
uint32_t | core_util_atomic_decr_u32 (volatile uint32_t *valuePtr, uint32_t delta) |
Atomic decrement. More... | |
uint64_t | core_util_atomic_decr_u64 (volatile uint64_t *valuePtr, uint64_t delta) |
Atomic decrement. More... | |
MBED_FORCEINLINE int8_t | core_util_atomic_decr_s8 (volatile int8_t *valuePtr, int8_t delta) |
Atomic decrement. More... | |
MBED_FORCEINLINE int16_t | core_util_atomic_decr_s16 (volatile int16_t *valuePtr, int16_t delta) |
Atomic decrement. More... | |
MBED_FORCEINLINE int32_t | core_util_atomic_decr_s32 (volatile int32_t *valuePtr, int32_t delta) |
Atomic decrement. More... | |
MBED_FORCEINLINE int64_t | core_util_atomic_decr_s64 (volatile int64_t *valuePtr, int64_t delta) |
Atomic decrement. More... | |
void * | core_util_atomic_decr_ptr (void *volatile *valuePtr, ptrdiff_t delta) |
Atomic decrement. More... | |
uint8_t | core_util_atomic_fetch_add_u8 (volatile uint8_t *valuePtr, uint8_t arg) |
Atomic add. More... | |
MBED_FORCEINLINE uint8_t | core_util_atomic_fetch_add_explicit_u8 (volatile uint8_t *valuePtr, uint8_t arg, mbed_memory_order order) |
Atomic add. More... | |
uint16_t | core_util_atomic_fetch_add_u16 (volatile uint16_t *valuePtr, uint16_t arg) |
Atomic add. More... | |
MBED_FORCEINLINE uint16_t | core_util_atomic_fetch_add_explicit_u16 (volatile uint16_t *valuePtr, uint16_t arg, mbed_memory_order order) |
uint32_t | core_util_atomic_fetch_add_u32 (volatile uint32_t *valuePtr, uint32_t arg) |
Atomic add. More... | |
MBED_FORCEINLINE uint32_t | core_util_atomic_fetch_add_explicit_u32 (volatile uint32_t *valuePtr, uint32_t arg, mbed_memory_order order) |
uint64_t | core_util_atomic_fetch_add_u64 (volatile uint64_t *valuePtr, uint64_t arg) |
Atomic add. More... | |
MBED_FORCEINLINE uint64_t | core_util_atomic_fetch_add_explicit_u64 (volatile uint64_t *valuePtr, uint64_t arg, mbed_memory_order order) |
MBED_FORCEINLINE int8_t | core_util_atomic_fetch_add_s8 (volatile int8_t *valuePtr, int8_t arg) |
Atomic add. More... | |
MBED_FORCEINLINE int8_t | core_util_atomic_fetch_add_explicit_s8 (volatile int8_t *valuePtr, int8_t arg, mbed_memory_order order) |
MBED_FORCEINLINE int16_t | core_util_atomic_fetch_add_s16 (volatile int16_t *valuePtr, int16_t arg) |
Atomic add. More... | |
MBED_FORCEINLINE int16_t | core_util_atomic_fetch_add_explicit_s16 (volatile int16_t *valuePtr, int16_t arg, mbed_memory_order order) |
MBED_FORCEINLINE int32_t | core_util_atomic_fetch_add_s32 (volatile int32_t *valuePtr, int32_t arg) |
Atomic add. More... | |
MBED_FORCEINLINE int32_t | core_util_atomic_fetch_add_explicit_s32 (volatile int32_t *valuePtr, int32_t arg, mbed_memory_order order) |
MBED_FORCEINLINE int64_t | core_util_atomic_fetch_add_s64 (volatile int64_t *valuePtr, int64_t arg) |
Atomic add. More... | |
MBED_FORCEINLINE int64_t | core_util_atomic_fetch_add_explicit_s64 (volatile int64_t *valuePtr, int64_t arg, mbed_memory_order order) |
MBED_FORCEINLINE void * | core_util_atomic_fetch_add_ptr (void *volatile *valuePtr, ptrdiff_t arg) |
Atomic add. More... | |
MBED_FORCEINLINE void * | core_util_atomic_fetch_add_explicit_ptr (void *volatile *valuePtr, ptrdiff_t arg, mbed_memory_order order) |
uint8_t | core_util_atomic_fetch_sub_u8 (volatile uint8_t *valuePtr, uint8_t arg) |
Atomic subtract. More... | |
MBED_FORCEINLINE uint8_t | core_util_atomic_fetch_sub_explicit_u8 (volatile uint8_t *valuePtr, uint8_t arg, mbed_memory_order order) |
Atomic subtract. More... | |
uint16_t | core_util_atomic_fetch_sub_u16 (volatile uint16_t *valuePtr, uint16_t arg) |
Atomic subtract. More... | |
MBED_FORCEINLINE uint16_t | core_util_atomic_fetch_sub_explicit_u16 (volatile uint16_t *valuePtr, uint16_t arg, mbed_memory_order order) |
uint32_t | core_util_atomic_fetch_sub_u32 (volatile uint32_t *valuePtr, uint32_t arg) |
Atomic subtract. More... | |
MBED_FORCEINLINE uint32_t | core_util_atomic_fetch_sub_explicit_u32 (volatile uint32_t *valuePtr, uint32_t arg, mbed_memory_order order) |
uint64_t | core_util_atomic_fetch_sub_u64 (volatile uint64_t *valuePtr, uint64_t arg) |
Atomic subtract. More... | |
MBED_FORCEINLINE uint64_t | core_util_atomic_fetch_sub_explicit_u64 (volatile uint64_t *valuePtr, uint64_t arg, mbed_memory_order order) |
MBED_FORCEINLINE int8_t | core_util_atomic_fetch_sub_s8 (volatile int8_t *valuePtr, int8_t arg) |
Atomic subtract. More... | |
MBED_FORCEINLINE int8_t | core_util_atomic_fetch_sub_explicit_s8 (volatile int8_t *valuePtr, int8_t arg, mbed_memory_order order) |
MBED_FORCEINLINE int16_t | core_util_atomic_fetch_sub_s16 (volatile int16_t *valuePtr, int16_t arg) |
Atomic subtract. More... | |
MBED_FORCEINLINE int16_t | core_util_atomic_fetch_sub_explicit_s16 (volatile int16_t *valuePtr, int16_t arg, mbed_memory_order order) |
MBED_FORCEINLINE int32_t | core_util_atomic_fetch_sub_s32 (volatile int32_t *valuePtr, int32_t arg) |
Atomic subtract. More... | |
MBED_FORCEINLINE int32_t | core_util_atomic_fetch_sub_explicit_s32 (volatile int32_t *valuePtr, int32_t arg, mbed_memory_order order) |
MBED_FORCEINLINE int64_t | core_util_atomic_fetch_sub_s64 (volatile int64_t *valuePtr, int64_t arg) |
Atomic subtract. More... | |
MBED_FORCEINLINE int64_t | core_util_atomic_fetch_sub_explicit_s64 (volatile int64_t *valuePtr, int64_t arg, mbed_memory_order order) |
MBED_FORCEINLINE void * | core_util_atomic_fetch_sub_ptr (void *volatile *valuePtr, ptrdiff_t arg) |
Atomic subtract. More... | |
MBED_FORCEINLINE void * | core_util_atomic_fetch_sub_explicit_ptr (void *volatile *valuePtr, ptrdiff_t arg, mbed_memory_order order) |
uint8_t | core_util_atomic_fetch_and_u8 (volatile uint8_t *valuePtr, uint8_t arg) |
Atomic bitwise and. More... | |
MBED_FORCEINLINE uint8_t | core_util_atomic_fetch_and_explicit_u8 (volatile uint8_t *valuePtr, uint8_t arg, mbed_memory_order order) |
Atomic bitwise and. More... | |
uint16_t | core_util_atomic_fetch_and_u16 (volatile uint16_t *valuePtr, uint16_t arg) |
Atomic bitwise and. More... | |
MBED_FORCEINLINE uint16_t | core_util_atomic_fetch_and_explicit_u16 (volatile uint16_t *valuePtr, uint16_t arg, mbed_memory_order order) |
uint32_t | core_util_atomic_fetch_and_u32 (volatile uint32_t *valuePtr, uint32_t arg) |
Atomic bitwise and. More... | |
MBED_FORCEINLINE uint32_t | core_util_atomic_fetch_and_explicit_u32 (volatile uint32_t *valuePtr, uint32_t arg, mbed_memory_order order) |
uint64_t | core_util_atomic_fetch_and_u64 (volatile uint64_t *valuePtr, uint64_t arg) |
Atomic bitwise and. More... | |
MBED_FORCEINLINE uint64_t | core_util_atomic_fetch_and_explicit_u64 (volatile uint64_t *valuePtr, uint64_t arg, mbed_memory_order order) |
uint8_t | core_util_atomic_fetch_or_u8 (volatile uint8_t *valuePtr, uint8_t arg) |
Atomic bitwise inclusive or. More... | |
MBED_FORCEINLINE uint8_t | core_util_atomic_fetch_or_explicit_u8 (volatile uint8_t *valuePtr, uint8_t arg, mbed_memory_order order) |
Atomic bitwise inclusive or. More... | |
uint16_t | core_util_atomic_fetch_or_u16 (volatile uint16_t *valuePtr, uint16_t arg) |
Atomic bitwise inclusive or. More... | |
MBED_FORCEINLINE uint16_t | core_util_atomic_fetch_or_explicit_u16 (volatile uint16_t *valuePtr, uint16_t arg, mbed_memory_order order) |
uint32_t | core_util_atomic_fetch_or_u32 (volatile uint32_t *valuePtr, uint32_t arg) |
Atomic bitwise inclusive or. More... | |
MBED_FORCEINLINE uint32_t | core_util_atomic_fetch_or_explicit_u32 (volatile uint32_t *valuePtr, uint32_t arg, mbed_memory_order order) |
uint64_t | core_util_atomic_fetch_or_u64 (volatile uint64_t *valuePtr, uint64_t arg) |
Atomic bitwise inclusive or. More... | |
MBED_FORCEINLINE uint64_t | core_util_atomic_fetch_or_explicit_u64 (volatile uint64_t *valuePtr, uint64_t arg, mbed_memory_order order) |
uint8_t | core_util_atomic_fetch_xor_u8 (volatile uint8_t *valuePtr, uint8_t arg) |
Atomic bitwise exclusive or. More... | |
MBED_FORCEINLINE uint8_t | core_util_atomic_fetch_xor_explicit_u8 (volatile uint8_t *valuePtr, uint8_t arg, mbed_memory_order order) |
Atomic bitwise exclusive or. More... | |
uint16_t | core_util_atomic_fetch_xor_u16 (volatile uint16_t *valuePtr, uint16_t arg) |
Atomic bitwise exclusive or. More... | |
MBED_FORCEINLINE uint16_t | core_util_atomic_fetch_xor_explicit_u16 (volatile uint16_t *valuePtr, uint16_t arg, mbed_memory_order order) |
uint32_t | core_util_atomic_fetch_xor_u32 (volatile uint32_t *valuePtr, uint32_t arg) |
Atomic bitwise exclusive or. More... | |
MBED_FORCEINLINE uint32_t | core_util_atomic_fetch_xor_explicit_u32 (volatile uint32_t *valuePtr, uint32_t arg, mbed_memory_order order) |
uint64_t | core_util_atomic_fetch_xor_u64 (volatile uint64_t *valuePtr, uint64_t arg) |
Atomic bitwise exclusive or. More... | |
MBED_FORCEINLINE uint64_t | core_util_atomic_fetch_xor_explicit_u64 (volatile uint64_t *valuePtr, uint64_t arg, mbed_memory_order order) |
template<typename T > | |
T | core_util_atomic_load (const volatile T *valuePtr) noexcept |
template<typename T > | |
T | core_util_atomic_load (const T *valuePtr) noexcept |
template<typename T > | |
void | core_util_atomic_store (volatile T *valuePtr, mstd::type_identity_t< T > desiredValue) noexcept |
template<typename T > | |
void | core_util_atomic_store (T *valuePtr, mstd::type_identity_t< T > desiredValue) noexcept |
template<typename T > | |
T | core_util_atomic_exchange (volatile T *valuePtr, mstd::type_identity_t< T > desiredValue) noexcept |
Atomic exchange. More... | |
template<typename T > | |
bool | core_util_atomic_compare_exchange_strong (volatile T *ptr, mstd::type_identity_t< T > *expectedCurrentValue, mstd::type_identity_t< T > desiredValue) noexcept |
Atomic compare and set. More... | |
template<typename T > | |
bool | core_util_atomic_compare_exchange_weak (volatile T *ptr, mstd::type_identity_t< T > *expectedCurrentValue, mstd::type_identity_t< T > desiredValue) noexcept |
Atomic compare and set. More... | |
template<typename T > | |
T | core_util_atomic_fetch_add (volatile T *valuePtr, mstd::type_identity_t< T > arg) noexcept |
Atomic add. More... | |
template<typename T > | |
T | core_util_atomic_fetch_sub (volatile T *valuePtr, mstd::type_identity_t< T > arg) noexcept |
Atomic subtract. More... | |
template<typename T > | |
T | core_util_atomic_fetch_and (volatile T *valuePtr, mstd::type_identity_t< T > arg) noexcept |
Atomic bitwise and. More... | |
template<typename T > | |
T | core_util_atomic_fetch_or (volatile T *valuePtr, mstd::type_identity_t< T > arg) noexcept |
Atomic bitwise inclusive or. More... | |
template<typename T > | |
T | core_util_atomic_fetch_xor (volatile T *valuePtr, mstd::type_identity_t< T > arg) noexcept |
Atomic bitwise exclusive or. More... | |
template<typename T > | |
T | core_util_atomic_load_explicit (const volatile T *valuePtr, mbed_memory_order order) noexcept |
template<typename T > | |
T | core_util_atomic_load_explicit (const T *valuePtr, mbed_memory_order order) noexcept |
template<typename T > | |
void | core_util_atomic_store_explicit (volatile T *valuePtr, mstd::type_identity_t< T > desiredValue, mbed_memory_order order) noexcept |
template<typename T > | |
void | core_util_atomic_store_explicit (T *valuePtr, mstd::type_identity_t< T > desiredValue, mbed_memory_order order) noexcept |
template<typename T > | |
T | core_util_atomic_exchange_explicit (volatile T *valuePtr, mstd::type_identity_t< T > desiredValue, mbed_memory_order order) noexcept |
template<typename T > | |
bool | core_util_atomic_compare_exchange_strong_explicit (volatile T *ptr, mstd::type_identity_t< T > *expectedCurrentValue, mstd::type_identity_t< T > desiredValue, mbed_memory_order success, mbed_memory_order failure) noexcept |
template<typename T > | |
bool | core_util_atomic_compare_exchange_weak_explicit (volatile T *ptr, mstd::type_identity_t< T > *expectedCurrentValue, mstd::type_identity_t< T > desiredValue, mbed_memory_order success, mbed_memory_order failure) noexcept |
template<typename T > | |
T | core_util_atomic_fetch_add_explicit (volatile T *valuePtr, mstd::type_identity_t< T > arg, mbed_memory_order order) noexcept |
template<typename T > | |
T | core_util_atomic_fetch_sub_explicit (volatile T *valuePtr, mstd::type_identity_t< T > arg, mbed_memory_order order) noexcept |
template<typename T > | |
T | core_util_atomic_fetch_and_explicit (volatile T *valuePtr, mstd::type_identity_t< T > arg, mbed_memory_order order) noexcept |
template<typename T > | |
T | core_util_atomic_fetch_or_explicit (volatile T *valuePtr, mstd::type_identity_t< T > arg, mbed_memory_order order) noexcept |
template<typename T > | |
T | core_util_atomic_fetch_xor_explicit (volatile T *valuePtr, mstd::type_identity_t< T > arg, mbed_memory_order order) noexcept |
template<typename T > | |
T * | core_util_atomic_load (T *const volatile *valuePtr) noexcept |
template<typename T > | |
T * | core_util_atomic_load (T *const *valuePtr) noexcept |
template<typename T > | |
void | core_util_atomic_store (T *volatile *valuePtr, mstd::type_identity_t< T > *desiredValue) noexcept |
template<typename T > | |
void | core_util_atomic_store (T **valuePtr, mstd::type_identity_t< T > *desiredValue) noexcept |
template<typename T > | |
T * | core_util_atomic_exchange (T *volatile *valuePtr, mstd::type_identity_t< T > *desiredValue) noexcept |
Atomic exchange. More... | |
template<typename T > | |
bool | core_util_atomic_compare_exchange_strong (T *volatile *ptr, mstd::type_identity_t< T > **expectedCurrentValue, mstd::type_identity_t< T > *desiredValue) noexcept |
Atomic compare and set. More... | |
template<typename T > | |
bool | core_util_atomic_compare_exchange_weak (T *volatile *ptr, mstd::type_identity_t< T > **expectedCurrentValue, mstd::type_identity_t< T > *desiredValue) noexcept |
Atomic compare and set. More... | |
template<typename T > | |
T * | core_util_atomic_fetch_add (T *volatile *valuePtr, ptrdiff_t arg) noexcept |
Atomic add. More... | |
template<typename T > | |
T * | core_util_atomic_fetch_sub (T *volatile *valuePtr, ptrdiff_t arg) noexcept |
Atomic subtract. More... | |
template<typename T > | |
T * | core_util_atomic_load_explicit (T *const volatile *valuePtr, mbed_memory_order order) noexcept |
template<typename T > | |
T * | core_util_atomic_load_explicit (T *const *valuePtr, mbed_memory_order order) noexcept |
template<typename T > | |
void | core_util_atomic_store_explicit (T *volatile *valuePtr, mstd::type_identity_t< T > *desiredValue, mbed_memory_order order) noexcept |
template<typename T > | |
void | core_util_atomic_store_explicit (T **valuePtr, mstd::type_identity_t< T > *desiredValue, mbed_memory_order order) noexcept |
template<typename T > | |
T * | core_util_atomic_exchange_explicit (T *volatile *valuePtr, mstd::type_identity_t< T > *desiredValue, mbed_memory_order order) noexcept |
template<typename T > | |
bool | core_util_atomic_compare_exchange_strong_explicit (T *volatile *ptr, mstd::type_identity_t< T > **expectedCurrentValue, mstd::type_identity_t< T > *desiredValue, mbed_memory_order success, mbed_memory_order failure) noexcept |
template<typename T > | |
bool | core_util_atomic_compare_exchange_weak_explicit (T *volatile *ptr, mstd::type_identity_t< T > **expectedCurrentValue, mstd::type_identity_t< T > *desiredValue, mbed_memory_order success, mbed_memory_order failure) noexcept |
template<typename T > | |
T * | core_util_atomic_fetch_add_explicit (T *volatile *valuePtr, ptrdiff_t arg, mbed_memory_order order) noexcept |
template<typename T > | |
T * | core_util_atomic_fetch_sub_explicit (T *volatile *valuePtr, ptrdiff_t arg, mbed_memory_order order) noexcept |
Atomic functions behave analogously to C11 and C++11 atomics - loads have acquire semantics, stores have release semantics, and atomic read-modify-write operations are sequentially consistent.
Atomicity is enforced both between threads and interrupt handlers.
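As a quick orientation, the sketch below shows the typical shape of code built on these functions: an interrupt handler publishes data and a thread consumes it through the acquire/release semantics described above. It is a minimal illustration only; the variable and handler names are not part of the API, and the include path may differ between Mbed OS versions.

    #include "mbed_atomic.h"   /* platform/mbed_atomic.h in the Mbed OS tree */

    static volatile uint32_t event_count = 0;   /* shared between an ISR and a thread */
    static volatile bool data_ready = false;

    void my_irq_handler(void)                   /* hypothetical ISR */
    {
        core_util_atomic_incr_u32(&event_count, 1);      /* sequentially consistent RMW */
        core_util_atomic_store_bool(&data_ready, true);  /* store has release semantics */
    }

    void poll_events(void)                      /* hypothetical thread code */
    {
        if (core_util_atomic_load_bool(&data_ready)) {   /* load has acquire semantics */
            uint32_t n = core_util_atomic_load_u32(&event_count);
            (void)n;                            /* use the observed count */
        }
    }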
#define CORE_UTIL_ATOMIC_FLAG_INIT { 0 } |
Initializer for a core_util_atomic_flag.
Example:
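A minimal sketch of such an initialization (the variable name is illustrative):

    core_util_atomic_flag in_progress = CORE_UTIL_ATOMIC_FLAG_INIT;  /* flag starts cleared */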
Definition at line 130 of file mbed_atomic.h.
typedef enum mbed_memory_order mbed_memory_order |
Memory order constraints for atomic operations.
Intended semantics are as per C++11.
typedef struct core_util_atomic_flag core_util_atomic_flag |
A lock-free, primitive atomic flag.
Emulates C11's atomic_flag. The flag is initially in an indeterminate state unless explicitly initialized with CORE_UTIL_ATOMIC_FLAG_INIT.
enum mbed_memory_order |
Memory order constraints for atomic operations.
Intended semantics are as per C++11.
Definition at line 51 of file mbed_atomic.h.
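Assuming the enumerators follow the C++11 naming used by this header (mbed_memory_order_relaxed through mbed_memory_order_seq_cst), a minimal sketch of explicit ordering for a publish/consume handshake looks like this; the data and function names are illustrative:

    static uint32_t payload;                 /* plain data, published via 'ready' */
    static volatile bool ready = false;

    void producer(void)
    {
        payload = 42u;
        /* release store: makes the payload write visible to an acquire reader */
        core_util_atomic_store_explicit_bool(&ready, true, mbed_memory_order_release);
    }

    void consumer(void)
    {
        /* acquire load: pairs with the release store above */
        if (core_util_atomic_load_explicit_bool(&ready, mbed_memory_order_acquire)) {
            uint32_t v = payload;            /* safe to read after the acquire */
            (void)v;
        }
    }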
bool core_util_atomic_flag_test_and_set | ( | volatile core_util_atomic_flag * | flagPtr | ) |
Atomic test and set.
Atomically tests then sets the flag to true, returning the previous value.
flagPtr | Target flag being tested and set. |
MBED_FORCEINLINE bool core_util_atomic_flag_test_and_set_explicit | ( | volatile core_util_atomic_flag * | flagPtr, |
mbed_memory_order | order | ||
) |
Atomic test and set.
Atomically tests then sets the flag to true, returning the previous value.
flagPtr | Target flag being tested and set. |
order | memory ordering constraint |
Definition at line 634 of file mbed_atomic_impl.h.
MBED_FORCEINLINE void core_util_atomic_flag_clear | ( | volatile core_util_atomic_flag * | flagPtr | ) |
Atomic clear.
flagPtr | Target flag being cleared. |
Definition at line 675 of file mbed_atomic_impl.h.
MBED_FORCEINLINE void core_util_atomic_flag_clear_explicit | ( | volatile core_util_atomic_flag * | flagPtr, |
mbed_memory_order | order | ||
) |
Atomic clear.
flagPtr | Target flag being cleared. |
order | memory ordering constraint |
Definition at line 682 of file mbed_atomic_impl.h.
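Taken together, test-and-set and clear give the classic busy-wait lock. A minimal sketch (function names are illustrative; real code must consider interrupt context and priorities before spinning):

    static core_util_atomic_flag lock_flag = CORE_UTIL_ATOMIC_FLAG_INIT;

    void lock_acquire(void)
    {
        /* spin until the previous value was false, i.e. this caller set the flag */
        while (core_util_atomic_flag_test_and_set(&lock_flag)) {
            /* busy-wait; the holder will call core_util_atomic_flag_clear() */
        }
    }

    void lock_release(void)
    {
        core_util_atomic_flag_clear(&lock_flag);
    }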
bool core_util_atomic_cas_u8 | ( | volatile uint8_t * | ptr, |
uint8_t * | expectedCurrentValue, | ||
uint8_t | desiredValue | ||
) |
Atomic compare and set.
It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.
Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].
ptr | The target memory location. | |
[in,out] | expectedCurrentValue | A pointer to some location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value. |
[in] | desiredValue | The new value computed based on '*expectedCurrentValue'. |
pseudocode:

    function cas(p : pointer to int, old : pointer to int, new : int) returns bool {
        if *p != *old {
            *old = *p
            return false
        }
        *p = new
        return true
    }

    function incr(p : pointer to int, a : int) returns int {
        done = false
        value = atomic_load(p)
        while not done {
            done = atomic_cas(p, &value, value + a) // *value gets updated automatically until success
        }
        return value + a
    }
However, if the call is made in a loop like this, the atomic_compare_exchange_weak functions are to be preferred.
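A concrete C version of the incr() pseudocode above, written against the uint32_t variant (the same pattern applies to every width; the function name is illustrative):

    uint32_t add_and_fetch_u32(volatile uint32_t *p, uint32_t a)
    {
        uint32_t value = core_util_atomic_load_u32(p);
        while (!core_util_atomic_cas_u32(p, &value, value + a)) {
            /* CAS failed: 'value' now holds the current contents of *p; retry */
        }
        return value + a;
    }

In practice core_util_atomic_incr_u32 or core_util_atomic_fetch_add_u32 already provide this operation; the loop is shown only to illustrate the CAS contract.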
MBED_FORCEINLINE bool core_util_atomic_cas_explicit_u8 | ( | volatile uint8_t * | ptr, |
uint8_t * | expectedCurrentValue, | ||
uint8_t | desiredValue, | ||
mbed_memory_order | success, | ||
mbed_memory_order | failure | ||
) |
Atomic compare and set.
It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.
Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].
ptr | The target memory location. | |
[in,out] | expectedCurrentValue | A pointer to some location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value. |
[in] | desiredValue | The new value computed based on '*expectedCurrentValue'. |
pseudocode:

    function cas(p : pointer to int, old : pointer to int, new : int) returns bool {
        if *p != *old {
            *old = *p
            return false
        }
        *p = new
        return true
    }

    function incr(p : pointer to int, a : int) returns int {
        done = false
        value = atomic_load(p)
        while not done {
            done = atomic_cas(p, &value, value + a) // *value gets updated automatically until success
        }
        return value + a
    }
However, if the call is made in a loop like this, the atomic_compare_exchange_weak functions are to be preferred.
success | memory ordering constraint for successful exchange |
failure | memory ordering constraint for failure |
bool core_util_atomic_cas_u16 | ( | volatile uint16_t * | ptr, |
uint16_t * | expectedCurrentValue, | ||
uint16_t | desiredValue | ||
) |
Atomic compare and set.
It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.
Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].
ptr | The target memory location. | |
[in,out] | expectedCurrentValue | A pointer to some location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value. |
[in] | desiredValue | The new value computed based on '*expectedCurrentValue'. |
pseudocode:

    function cas(p : pointer to int, old : pointer to int, new : int) returns bool {
        if *p != *old {
            *old = *p
            return false
        }
        *p = new
        return true
    }

    function incr(p : pointer to int, a : int) returns int {
        done = false
        value = atomic_load(p)
        while not done {
            done = atomic_cas(p, &value, value + a) // *value gets updated automatically until success
        }
        return value + a
    }
However, if the call is made in a loop like this, the atomic_compare_exchange_weak functions are to be preferred.
MBED_FORCEINLINE bool core_util_atomic_cas_explicit_u16 | ( | volatile uint16_t * | ptr, |
uint16_t * | expectedCurrentValue, | ||
uint16_t | desiredValue, | ||
mbed_memory_order | success, | ||
mbed_memory_order | failure | ||
) |
bool core_util_atomic_cas_u32 | ( | volatile uint32_t * | ptr, |
uint32_t * | expectedCurrentValue, | ||
uint32_t | desiredValue | ||
) |
Atomic compare and set.
It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.
Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].
ptr | The target memory location. | |
[in,out] | expectedCurrentValue | A pointer to some location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value. |
[in] | desiredValue | The new value computed based on '*expectedCurrentValue'. |
pseudocode:

    function cas(p : pointer to int, old : pointer to int, new : int) returns bool {
        if *p != *old {
            *old = *p
            return false
        }
        *p = new
        return true
    }

    function incr(p : pointer to int, a : int) returns int {
        done = false
        value = atomic_load(p)
        while not done {
            done = atomic_cas(p, &value, value + a) // *value gets updated automatically until success
        }
        return value + a
    }
However, if the call is made in a loop like this, the atomic_compare_exchange_weak functions are to be preferred.
MBED_FORCEINLINE bool core_util_atomic_cas_explicit_u32 | ( | volatile uint32_t * | ptr, |
uint32_t * | expectedCurrentValue, | ||
uint32_t | desiredValue, | ||
mbed_memory_order | success, | ||
mbed_memory_order | failure | ||
) |
bool core_util_atomic_cas_u64 | ( | volatile uint64_t * | ptr, |
uint64_t * | expectedCurrentValue, | ||
uint64_t | desiredValue | ||
) |
Atomic compare and set.
It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.
Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].
ptr | The target memory location. | |
[in,out] | expectedCurrentValue | A pointer to some location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value. |
[in] | desiredValue | The new value computed based on '*expectedCurrentValue'. |
pseudocode:

    function cas(p : pointer to int, old : pointer to int, new : int) returns bool {
        if *p != *old {
            *old = *p
            return false
        }
        *p = new
        return true
    }

    function incr(p : pointer to int, a : int) returns int {
        done = false
        value = atomic_load(p)
        while not done {
            done = atomic_cas(p, &value, value + a) // *value gets updated automatically until success
        }
        return value + a
    }
However, if the call is made in a loop like this, the atomic_compare_exchange_weak functions are to be preferred.
MBED_FORCEINLINE bool core_util_atomic_cas_explicit_u64 | ( | volatile uint64_t * | ptr, |
uint64_t * | expectedCurrentValue, | ||
uint64_t | desiredValue, | ||
mbed_memory_order | success, | ||
mbed_memory_order | failure | ||
) |
MBED_FORCEINLINE bool core_util_atomic_cas_s8 | ( | volatile int8_t * | ptr, |
int8_t * | expectedCurrentValue, | ||
int8_t | desiredValue | ||
) |
Atomic compare and set.
It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.
Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].
ptr | The target memory location. | |
[in,out] | expectedCurrentValue | A pointer to some location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value. |
[in] | desiredValue | The new value computed based on '*expectedCurrentValue'. |
pseudocode:

    function cas(p : pointer to int, old : pointer to int, new : int) returns bool {
        if *p != *old {
            *old = *p
            return false
        }
        *p = new
        return true
    }

    function incr(p : pointer to int, a : int) returns int {
        done = false
        value = atomic_load(p)
        while not done {
            done = atomic_cas(p, &value, value + a) // *value gets updated automatically until success
        }
        return value + a
    }
However, if the call is made in a loop like this, the atomic_compare_exchange_weak functions are to be preferred.
Definition at line 769 of file mbed_atomic_impl.h.
MBED_FORCEINLINE bool core_util_atomic_cas_explicit_s8 | ( | volatile int8_t * | ptr, |
int8_t * | expectedCurrentValue, | ||
int8_t | desiredValue, | ||
mbed_memory_order | success, | ||
mbed_memory_order | failure | ||
) |
Definition at line 769 of file mbed_atomic_impl.h.
MBED_FORCEINLINE bool core_util_atomic_cas_s16 | ( | volatile int16_t * | ptr, |
int16_t * | expectedCurrentValue, | ||
int16_t | desiredValue | ||
) |
Atomic compare and set.
It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.
Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].
ptr | The target memory location. | |
[in,out] | expectedCurrentValue | A pointer to some location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value. |
[in] | desiredValue | The new value computed based on '*expectedCurrentValue'. |
pseudocode:

    function cas(p : pointer to int, old : pointer to int, new : int) returns bool {
        if *p != *old {
            *old = *p
            return false
        }
        *p = new
        return true
    }

    function incr(p : pointer to int, a : int) returns int {
        done = false
        value = atomic_load(p)
        while not done {
            done = atomic_cas(p, &value, value + a) // *value gets updated automatically until success
        }
        return value + a
    }
However, if the call is made in a loop like this, the atomic_compare_exchange_weak functions are to be preferred.
Definition at line 769 of file mbed_atomic_impl.h.
MBED_FORCEINLINE bool core_util_atomic_cas_explicit_s16 | ( | volatile int16_t * | ptr, |
int16_t * | expectedCurrentValue, | ||
int16_t | desiredValue, | ||
mbed_memory_order | success, | ||
mbed_memory_order | failure | ||
) |
Definition at line 769 of file mbed_atomic_impl.h.
MBED_FORCEINLINE bool core_util_atomic_cas_s32 | ( | volatile int32_t * | ptr, |
int32_t * | expectedCurrentValue, | ||
int32_t | desiredValue | ||
) |
Atomic compare and set.
It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.
Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].
ptr | The target memory location. | |
[in,out] | expectedCurrentValue | A pointer to some location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value. |
[in] | desiredValue | The new value computed based on '*expectedCurrentValue'. |
pseudocode:

    function cas(p : pointer to int, old : pointer to int, new : int) returns bool {
        if *p != *old {
            *old = *p
            return false
        }
        *p = new
        return true
    }

    function incr(p : pointer to int, a : int) returns int {
        done = false
        value = atomic_load(p)
        while not done {
            done = atomic_cas(p, &value, value + a) // *value gets updated automatically until success
        }
        return value + a
    }
However, if the call is made in a loop like this, the atomic_compare_exchange_weak functions are to be preferred.
Definition at line 769 of file mbed_atomic_impl.h.
MBED_FORCEINLINE bool core_util_atomic_cas_explicit_s32 | ( | volatile int32_t * | ptr, |
int32_t * | expectedCurrentValue, | ||
int32_t | desiredValue, | ||
mbed_memory_order | success, | ||
mbed_memory_order | failure | ||
) |
Definition at line 769 of file mbed_atomic_impl.h.
MBED_FORCEINLINE bool core_util_atomic_cas_s64 | ( | volatile int64_t * | ptr, |
int64_t * | expectedCurrentValue, | ||
int64_t | desiredValue | ||
) |
Atomic compare and set.
It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.
Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].
ptr | The target memory location. | |
[in,out] | expectedCurrentValue | A pointer to some location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value. |
[in] | desiredValue | The new value computed based on '*expectedCurrentValue'. |
pseudocode:

    function cas(p : pointer to int, old : pointer to int, new : int) returns bool {
        if *p != *old {
            *old = *p
            return false
        }
        *p = new
        return true
    }

    function incr(p : pointer to int, a : int) returns int {
        done = false
        value = atomic_load(p)
        while not done {
            done = atomic_cas(p, &value, value + a) // *value gets updated automatically until success
        }
        return value + a
    }
However, if the call is made in a loop like this, the atomic_compare_exchange_weak functions are to be preferred.
Definition at line 769 of file mbed_atomic_impl.h.
MBED_FORCEINLINE bool core_util_atomic_cas_explicit_s64 | ( | volatile int64_t * | ptr, |
int64_t * | expectedCurrentValue, | ||
int64_t | desiredValue, | ||
mbed_memory_order | success, | ||
mbed_memory_order | failure | ||
) |
Definition at line 769 of file mbed_atomic_impl.h.
MBED_FORCEINLINE bool core_util_atomic_cas_bool | ( | volatile bool * | ptr, |
bool * | expectedCurrentValue, | ||
bool | desiredValue | ||
) |
Atomic compare and set.
It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.
Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].
ptr | The target memory location. | |
[in,out] | expectedCurrentValue | A pointer to some location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value. |
[in] | desiredValue | The new value computed based on '*expectedCurrentValue'. |
pseudocode:

    function cas(p : pointer to int, old : pointer to int, new : int) returns bool {
        if *p != *old {
            *old = *p
            return false
        }
        *p = new
        return true
    }

    function incr(p : pointer to int, a : int) returns int {
        done = false
        value = atomic_load(p)
        while not done {
            done = atomic_cas(p, &value, value + a) // *value gets updated automatically until success
        }
        return value + a
    }
However, if the call is made in a loop like this, the atomic_compare_exchange_weak functions are to be preferred.
Definition at line 772 of file mbed_atomic_impl.h.
MBED_FORCEINLINE bool core_util_atomic_cas_explicit_bool | ( | volatile bool * | ptr, |
bool * | expectedCurrentValue, | ||
bool | desiredValue, | ||
mbed_memory_order | success, | ||
mbed_memory_order | failure | ||
) |
Definition at line 777 of file mbed_atomic_impl.h.
bool core_util_atomic_cas_ptr | ( | void *volatile * | ptr, |
void ** | expectedCurrentValue, | ||
void * | desiredValue | ||
) |
Atomic compare and set.
It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.
Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].
ptr | The target memory location. | |
[in,out] | expectedCurrentValue | A pointer to some location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value. |
[in] | desiredValue | The new value computed based on '*expectedCurrentValue'. |
pseudocode:

    function cas(p : pointer to int, old : pointer to int, new : int) returns bool {
        if *p != *old {
            *old = *p
            return false
        }
        *p = new
        return true
    }

    function incr(p : pointer to int, a : int) returns int {
        done = false
        value = atomic_load(p)
        while not done {
            done = atomic_cas(p, &value, value + a) // *value gets updated automatically until success
        }
        return value + a
    }
However, if the call is made in a loop like this, the atomic_compare_exchange_weak functions are to be preferred.
Definition at line 782 of file mbed_atomic_impl.h.
MBED_FORCEINLINE bool core_util_atomic_cas_explicit_ptr | ( | void *volatile * | ptr, |
void ** | expectedCurrentValue, | ||
void * | desiredValue, | ||
mbed_memory_order | success, | ||
mbed_memory_order | failure | ||
) |
Definition at line 797 of file mbed_atomic_impl.h.
bool core_util_atomic_compare_exchange_weak_u8 | ( | volatile uint8_t * | ptr, |
uint8_t * | expectedCurrentValue, | ||
uint8_t | desiredValue | ||
) |
Atomic compare and set.
It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.
Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].
ptr | The target memory location. | |
[in,out] | expectedCurrentValue | A pointer to some location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value. |
[in] | desiredValue | The new value computed based on '*expectedCurrentValue'. |
pseudocode:

    function cas(p : pointer to int, old : pointer to int, new : int) returns bool {
        if *p != *old or spurious failure {
            *old = *p
            return false
        }
        *p = new
        return true
    }

    function incr(p : pointer to int, a : int) returns int {
        done = false
        value = *p // This fetch operation need not be atomic.
        while not done {
            done = atomic_compare_exchange_weak(p, &value, value + a) // *value gets updated automatically until success
        }
        return value + a
    }
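The same retry loop written in C with the weak variant, which may fail spuriously but can be cheaper when used inside a loop (the function name is illustrative):

    uint8_t add_and_fetch_u8(volatile uint8_t *p, uint8_t a)
    {
        uint8_t value = core_util_atomic_load_u8(p);
        while (!core_util_atomic_compare_exchange_weak_u8(p, &value, (uint8_t)(value + a))) {
            /* spurious failure or contention: 'value' was refreshed; retry */
        }
        return (uint8_t)(value + a);
    }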
MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_u8 | ( | volatile uint8_t * | ptr, |
uint8_t * | expectedCurrentValue, | ||
uint8_t | desiredValue, | ||
mbed_memory_order | success, | ||
mbed_memory_order | failure | ||
) |
Atomic compare and set.
It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.
Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].
ptr | The target memory location. | |
[in,out] | expectedCurrentValue | A pointer to some location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value. |
[in] | desiredValue | The new value computed based on '*expectedCurrentValue'. |
pseudocode: function cas(p : pointer to int, old : pointer to int, new : int) returns bool { if *p != *old or spurious failure { *old = *p return false } *p = new return true }
function incr(p : pointer to int, a : int) returns int { done = false value = *p // This fetch operation need not be atomic. while not done { done = atomic_compare_exchange_weak(p, &value, value + a) // *value gets updated automatically until success } return value + a }
success | memory ordering constraint for successful exchange |
failure | memory ordering constraint for failure |
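As a sketch of how the explicit orderings might be combined, the hypothetical helper below takes a one-byte ownership flag with acquire semantics on success and relaxed semantics on failure. The mbed_memory_order_* enumerator spellings mirror C11; check them against the mbed_memory_order enumeration in your Mbed OS version.
    #include <stdint.h>
    #include <stdbool.h>
    #include "mbed_atomic.h"   /* assumed include path */

    static bool try_acquire(volatile uint8_t *owned)
    {
        uint8_t expected = 0;
        /* The weak form may fail spuriously, so callers typically retry. */
        return core_util_atomic_compare_exchange_weak_explicit_u8(
                   owned, &expected, 1,
                   mbed_memory_order_acquire,    /* ordering on success */
                   mbed_memory_order_relaxed);   /* ordering on failure */
    }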
bool core_util_atomic_compare_exchange_weak_u16 | ( | volatile uint16_t * | ptr, |
uint16_t * | expectedCurrentValue, | ||
uint16_t | desiredValue | ||
) |
Atomic compare and set.
It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.
Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].
Parameters:
    ptr                            The target memory location.
    [in,out] expectedCurrentValue  A pointer to a location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value.
    [in]     desiredValue          The new value computed based on '*expectedCurrentValue'.
pseudocode:
    function cas(p : pointer to int, old : pointer to int, new : int) returns bool {
        if *p != *old or spurious failure {
            *old = *p
            return false
        }
        *p = new
        return true
    }
Example retry loop:
    function incr(p : pointer to int, a : int) returns int {
        done = false
        value = *p // This fetch operation need not be atomic.
        while not done {
            done = atomic_compare_exchange_weak(p, &value, value + a) // *value is updated automatically on failure
        }
        return value + a
    }
MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_u16 | ( | volatile uint16_t * | ptr, |
uint16_t * | expectedCurrentValue, | ||
uint16_t | desiredValue, | ||
mbed_memory_order | success, | ||
mbed_memory_order | failure | ||
) |
bool core_util_atomic_compare_exchange_weak_u32 | ( | volatile uint32_t * | ptr, |
uint32_t * | expectedCurrentValue, | ||
uint32_t | desiredValue | ||
) |
Atomic compare and set.
It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.
Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].
Parameters:
    ptr                            The target memory location.
    [in,out] expectedCurrentValue  A pointer to a location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value.
    [in]     desiredValue          The new value computed based on '*expectedCurrentValue'.
pseudocode:
    function cas(p : pointer to int, old : pointer to int, new : int) returns bool {
        if *p != *old or spurious failure {
            *old = *p
            return false
        }
        *p = new
        return true
    }
Example retry loop:
    function incr(p : pointer to int, a : int) returns int {
        done = false
        value = *p // This fetch operation need not be atomic.
        while not done {
            done = atomic_compare_exchange_weak(p, &value, value + a) // *value is updated automatically on failure
        }
        return value + a
    }
MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_u32 | ( | volatile uint32_t * | ptr, |
uint32_t * | expectedCurrentValue, | ||
uint32_t | desiredValue, | ||
mbed_memory_order | success, | ||
mbed_memory_order | failure | ||
) |
bool core_util_atomic_compare_exchange_weak_u64 | ( | volatile uint64_t * | ptr, |
uint64_t * | expectedCurrentValue, | ||
uint64_t | desiredValue | ||
) |
Atomic compare and set.
It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.
Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].
Parameters:
    ptr                            The target memory location.
    [in,out] expectedCurrentValue  A pointer to a location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value.
    [in]     desiredValue          The new value computed based on '*expectedCurrentValue'.
pseudocode:
    function cas(p : pointer to int, old : pointer to int, new : int) returns bool {
        if *p != *old or spurious failure {
            *old = *p
            return false
        }
        *p = new
        return true
    }
Example retry loop:
    function incr(p : pointer to int, a : int) returns int {
        done = false
        value = *p // This fetch operation need not be atomic.
        while not done {
            done = atomic_compare_exchange_weak(p, &value, value + a) // *value is updated automatically on failure
        }
        return value + a
    }
MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_u64 | ( | volatile uint64_t * | ptr, |
uint64_t * | expectedCurrentValue, | ||
uint64_t | desiredValue, | ||
mbed_memory_order | success, | ||
mbed_memory_order | failure | ||
) |
MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_s8 | ( | volatile int8_t * | ptr, |
int8_t * | expectedCurrentValue, | ||
int8_t | desiredValue | ||
) |
Atomic compare and set.
It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.
Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].
Parameters:
    ptr                            The target memory location.
    [in,out] expectedCurrentValue  A pointer to a location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value.
    [in]     desiredValue          The new value computed based on '*expectedCurrentValue'.
pseudocode:
    function cas(p : pointer to int, old : pointer to int, new : int) returns bool {
        if *p != *old or spurious failure {
            *old = *p
            return false
        }
        *p = new
        return true
    }
Example retry loop:
    function incr(p : pointer to int, a : int) returns int {
        done = false
        value = *p // This fetch operation need not be atomic.
        while not done {
            done = atomic_compare_exchange_weak(p, &value, value + a) // *value is updated automatically on failure
        }
        return value + a
    }
Definition at line 770 of file mbed_atomic_impl.h.
MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_s8 | ( | volatile int8_t * | ptr, |
int8_t * | expectedCurrentValue, | ||
int8_t | desiredValue, | ||
mbed_memory_order | success, | ||
mbed_memory_order | failure | ||
) |
Definition at line 770 of file mbed_atomic_impl.h.
MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_s16 | ( | volatile int16_t * | ptr, |
int16_t * | expectedCurrentValue, | ||
int16_t | desiredValue | ||
) |
Atomic compare and set.
It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.
Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].
Parameters:
    ptr                            The target memory location.
    [in,out] expectedCurrentValue  A pointer to a location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value.
    [in]     desiredValue          The new value computed based on '*expectedCurrentValue'.
pseudocode:
    function cas(p : pointer to int, old : pointer to int, new : int) returns bool {
        if *p != *old or spurious failure {
            *old = *p
            return false
        }
        *p = new
        return true
    }
Example retry loop:
    function incr(p : pointer to int, a : int) returns int {
        done = false
        value = *p // This fetch operation need not be atomic.
        while not done {
            done = atomic_compare_exchange_weak(p, &value, value + a) // *value is updated automatically on failure
        }
        return value + a
    }
Definition at line 770 of file mbed_atomic_impl.h.
MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_s16 | ( | volatile int16_t * | ptr, |
int16_t * | expectedCurrentValue, | ||
int16_t | desiredValue, | ||
mbed_memory_order | success, | ||
mbed_memory_order | failure | ||
) |
Definition at line 770 of file mbed_atomic_impl.h.
MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_s32 | ( | volatile int32_t * | ptr, |
int32_t * | expectedCurrentValue, | ||
int32_t | desiredValue | ||
) |
Atomic compare and set.
It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.
Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].
Parameters:
    ptr                            The target memory location.
    [in,out] expectedCurrentValue  A pointer to a location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value.
    [in]     desiredValue          The new value computed based on '*expectedCurrentValue'.
pseudocode:
    function cas(p : pointer to int, old : pointer to int, new : int) returns bool {
        if *p != *old or spurious failure {
            *old = *p
            return false
        }
        *p = new
        return true
    }
Example retry loop:
    function incr(p : pointer to int, a : int) returns int {
        done = false
        value = *p // This fetch operation need not be atomic.
        while not done {
            done = atomic_compare_exchange_weak(p, &value, value + a) // *value is updated automatically on failure
        }
        return value + a
    }
Definition at line 770 of file mbed_atomic_impl.h.
MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_s32 | ( | volatile int32_t * | ptr, |
int32_t * | expectedCurrentValue, | ||
int32_t | desiredValue, | ||
mbed_memory_order | success, | ||
mbed_memory_order | failure | ||
) |
Definition at line 770 of file mbed_atomic_impl.h.
MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_s64 | ( | volatile int64_t * | ptr, |
int64_t * | expectedCurrentValue, | ||
int64_t | desiredValue | ||
) |
Atomic compare and set.
It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.
Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].
Parameters:
    ptr                            The target memory location.
    [in,out] expectedCurrentValue  A pointer to a location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value.
    [in]     desiredValue          The new value computed based on '*expectedCurrentValue'.
pseudocode:
    function cas(p : pointer to int, old : pointer to int, new : int) returns bool {
        if *p != *old or spurious failure {
            *old = *p
            return false
        }
        *p = new
        return true
    }
Example retry loop:
    function incr(p : pointer to int, a : int) returns int {
        done = false
        value = *p // This fetch operation need not be atomic.
        while not done {
            done = atomic_compare_exchange_weak(p, &value, value + a) // *value is updated automatically on failure
        }
        return value + a
    }
Definition at line 770 of file mbed_atomic_impl.h.
MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_s64 | ( | volatile int64_t * | ptr, |
int64_t * | expectedCurrentValue, | ||
int64_t | desiredValue, | ||
mbed_memory_order | success, | ||
mbed_memory_order | failure | ||
) |
Definition at line 770 of file mbed_atomic_impl.h.
MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_bool | ( | volatile bool * | ptr, |
bool * | expectedCurrentValue, | ||
bool | desiredValue | ||
) |
Atomic compare and set.
It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.
Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].
Parameters:
    ptr                            The target memory location.
    [in,out] expectedCurrentValue  A pointer to a location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value.
    [in]     desiredValue          The new value computed based on '*expectedCurrentValue'.
pseudocode:
    function cas(p : pointer to int, old : pointer to int, new : int) returns bool {
        if *p != *old or spurious failure {
            *old = *p
            return false
        }
        *p = new
        return true
    }
Example retry loop:
    function incr(p : pointer to int, a : int) returns int {
        done = false
        value = *p // This fetch operation need not be atomic.
        while not done {
            done = atomic_compare_exchange_weak(p, &value, value + a) // *value is updated automatically on failure
        }
        return value + a
    }
Definition at line 814 of file mbed_atomic_impl.h.
MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_bool | ( | volatile bool * | ptr, |
bool * | expectedCurrentValue, | ||
bool | desiredValue, | ||
mbed_memory_order | success, | ||
mbed_memory_order | failure | ||
) |
Definition at line 819 of file mbed_atomic_impl.h.
MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_ptr | ( | void *volatile * | ptr, |
void ** | expectedCurrentValue, | ||
void * | desiredValue | ||
) |
Atomic compare and set.
It compares the contents of a memory location to a given value and, only if they are the same, modifies the contents of that memory location to a given new value. This is done as a single atomic operation. The atomicity guarantees that the new value is calculated based on up-to-date information; if the value had been updated by another thread in the meantime, the write would fail due to a mismatched expectedCurrentValue.
Refer to https://en.wikipedia.org/wiki/Compare-and-set [which may redirect you to the article on compare-and-swap].
Parameters:
    ptr                            The target memory location.
    [in,out] expectedCurrentValue  A pointer to a location holding the expected current value of the data being set atomically. The computed 'desiredValue' should be a function of this current value.
    [in]     desiredValue          The new value computed based on '*expectedCurrentValue'.
pseudocode:
    function cas(p : pointer to int, old : pointer to int, new : int) returns bool {
        if *p != *old or spurious failure {
            *old = *p
            return false
        }
        *p = new
        return true
    }
Example retry loop:
    function incr(p : pointer to int, a : int) returns int {
        done = false
        value = *p // This fetch operation need not be atomic.
        while not done {
            done = atomic_compare_exchange_weak(p, &value, value + a) // *value is updated automatically on failure
        }
        return value + a
    }
Definition at line 824 of file mbed_atomic_impl.h.
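The pointer variant supports classic lock-free structures. Below is a hedged sketch of a Treiber-style push using core_util_atomic_compare_exchange_weak_ptr; the node type, function name and include path are hypothetical.
    #include <stddef.h>
    #include <stdbool.h>
    #include "mbed_atomic.h"   /* assumed include path */

    struct node {               /* hypothetical list node */
        struct node *next;
        int payload;
    };

    /* Push 'n' onto a lock-free singly linked list whose head is *head. */
    static void list_push(void *volatile *head, struct node *n)
    {
        void *old_head = core_util_atomic_load_ptr(head);
        do {
            n->next = (struct node *)old_head;
            /* On failure, 'old_head' is refreshed with the current head. */
        } while (!core_util_atomic_compare_exchange_weak_ptr(head, &old_head, n));
    }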
MBED_FORCEINLINE bool core_util_atomic_compare_exchange_weak_explicit_ptr | ( | void *volatile * | ptr, |
void ** | expectedCurrentValue, | ||
void * | desiredValue, | ||
mbed_memory_order | success, | ||
mbed_memory_order | failure | ||
) |
Definition at line 839 of file mbed_atomic_impl.h.
MBED_FORCEINLINE uint8_t core_util_atomic_load_u8 | ( | const volatile uint8_t * | valuePtr | ) |
Atomic load.
valuePtr | Target memory location. |
Definition at line 722 of file mbed_atomic_impl.h.
MBED_FORCEINLINE uint8_t core_util_atomic_load_explicit_u8 | ( | const volatile uint8_t * | valuePtr, |
mbed_memory_order | order | ||
) |
Atomic load with explicit ordering.
valuePtr | Target memory location. |
order | memory ordering constraint |
Definition at line 722 of file mbed_atomic_impl.h.
MBED_FORCEINLINE uint16_t core_util_atomic_load_u16 | ( | const volatile uint16_t * | valuePtr | ) |
Definition at line 723 of file mbed_atomic_impl.h.
MBED_FORCEINLINE uint16_t core_util_atomic_load_explicit_u16 | ( | const volatile uint16_t * | valuePtr, |
mbed_memory_order | order | ||
) |
Definition at line 723 of file mbed_atomic_impl.h.
MBED_FORCEINLINE uint32_t core_util_atomic_load_u32 | ( | const volatile uint32_t * | valuePtr | ) |
Definition at line 724 of file mbed_atomic_impl.h.
MBED_FORCEINLINE uint32_t core_util_atomic_load_explicit_u32 | ( | const volatile uint32_t * | valuePtr, |
mbed_memory_order | order | ||
) |
Definition at line 724 of file mbed_atomic_impl.h.
uint64_t core_util_atomic_load_u64 | ( | const volatile uint64_t * | valuePtr | ) |
MBED_FORCEINLINE uint64_t core_util_atomic_load_explicit_u64 | ( | const volatile uint64_t * | valuePtr, |
mbed_memory_order | order | ||
) |
MBED_FORCEINLINE int8_t core_util_atomic_load_s8 | ( | const volatile int8_t * | valuePtr | ) |
Definition at line 725 of file mbed_atomic_impl.h.
MBED_FORCEINLINE int8_t core_util_atomic_load_explicit_s8 | ( | const volatile int8_t * | valuePtr, |
mbed_memory_order | order | ||
) |
Definition at line 725 of file mbed_atomic_impl.h.
MBED_FORCEINLINE int16_t core_util_atomic_load_s16 | ( | const volatile int16_t * | valuePtr | ) |
Definition at line 726 of file mbed_atomic_impl.h.
MBED_FORCEINLINE int16_t core_util_atomic_load_explicit_s16 | ( | const volatile int16_t * | valuePtr, |
mbed_memory_order | order | ||
) |
Definition at line 726 of file mbed_atomic_impl.h.
MBED_FORCEINLINE int32_t core_util_atomic_load_s32 | ( | const volatile int32_t * | valuePtr | ) |
Definition at line 727 of file mbed_atomic_impl.h.
MBED_FORCEINLINE int32_t core_util_atomic_load_explicit_s32 | ( | const volatile int32_t * | valuePtr, |
mbed_memory_order | order | ||
) |
Definition at line 727 of file mbed_atomic_impl.h.
MBED_FORCEINLINE int64_t core_util_atomic_load_s64 | ( | const volatile int64_t * | valuePtr | ) |
Definition at line 737 of file mbed_atomic_impl.h.
MBED_FORCEINLINE int64_t core_util_atomic_load_explicit_s64 | ( | const volatile int64_t * | valuePtr, |
MBED_UNUSED mbed_memory_order | order | ||
) |
Atomic load with explicit ordering.
valuePtr | Target memory location. |
order | Currently unused since 64-bit atomic ops must be emulated |
Definition at line 981 of file mbed_atomic_impl.h.
MBED_FORCEINLINE bool core_util_atomic_load_bool | ( | const volatile bool * | valuePtr | ) |
Definition at line 728 of file mbed_atomic_impl.h.
MBED_FORCEINLINE bool core_util_atomic_load_explicit_bool | ( | const volatile bool * | valuePtr, |
mbed_memory_order | order | ||
) |
Definition at line 728 of file mbed_atomic_impl.h.
MBED_FORCEINLINE void * core_util_atomic_load_ptr | ( | void *const volatile * | valuePtr | ) |
Definition at line 729 of file mbed_atomic_impl.h.
MBED_FORCEINLINE void * core_util_atomic_load_explicit_ptr | ( | void *const volatile * | valuePtr, |
mbed_memory_order | order | ||
) |
Definition at line 729 of file mbed_atomic_impl.h.
MBED_FORCEINLINE void core_util_atomic_store_u8 | ( | volatile uint8_t * | valuePtr, |
uint8_t | desiredValue | ||
) |
Atomic store.
valuePtr | Target memory location. |
desiredValue | The value to store. |
Definition at line 722 of file mbed_atomic_impl.h.
MBED_FORCEINLINE void core_util_atomic_store_explicit_u8 | ( | volatile uint8_t * | valuePtr, |
uint8_t | desiredValue, | ||
mbed_memory_order | order | ||
) |
Atomic store with explicit ordering.
valuePtr | Target memory location. |
desiredValue | The value to store. |
order | memory ordering constraint |
Definition at line 722 of file mbed_atomic_impl.h.
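A common use of the explicit load/store pair is publishing data with release/acquire ordering. The sketch below is illustrative; the variable names and include path are assumptions.
    #include <stdint.h>
    #include <stdbool.h>
    #include "mbed_atomic.h"   /* assumed include path */

    static uint32_t shared_payload;          /* plain data, not atomic */
    static volatile uint8_t payload_ready;   /* flag published atomically */

    static void producer(uint32_t value)
    {
        shared_payload = value;              /* 1: write the data */
        /* 2: release store; the data write cannot be reordered after it */
        core_util_atomic_store_explicit_u8(&payload_ready, 1,
                                           mbed_memory_order_release);
    }

    static bool consumer(uint32_t *out)
    {
        /* acquire load: if we see the flag, we also see the data */
        if (core_util_atomic_load_explicit_u8(&payload_ready,
                                              mbed_memory_order_acquire)) {
            *out = shared_payload;
            return true;
        }
        return false;
    }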
MBED_FORCEINLINE void core_util_atomic_store_u16 | ( | volatile uint16_t * | valuePtr, |
uint16_t | desiredValue | ||
) |
Definition at line 723 of file mbed_atomic_impl.h.
MBED_FORCEINLINE void core_util_atomic_store_explicit_u16 | ( | volatile uint16_t * | valuePtr, |
uint16_t | desiredValue, | ||
mbed_memory_order | order | ||
) |
Definition at line 723 of file mbed_atomic_impl.h.
MBED_FORCEINLINE void core_util_atomic_store_u32 | ( | volatile uint32_t * | valuePtr, |
uint32_t | desiredValue | ||
) |
Definition at line 724 of file mbed_atomic_impl.h.
MBED_FORCEINLINE void core_util_atomic_store_explicit_u32 | ( | volatile uint32_t * | valuePtr, |
uint32_t | desiredValue, | ||
mbed_memory_order | order | ||
) |
Definition at line 724 of file mbed_atomic_impl.h.
void core_util_atomic_store_u64 | ( | volatile uint64_t * | valuePtr, |
uint64_t | desiredValue | ||
) |
MBED_FORCEINLINE void core_util_atomic_store_explicit_u64 | ( | volatile uint64_t * | valuePtr, |
uint64_t | desiredValue, | ||
mbed_memory_order | order | ||
) |
MBED_FORCEINLINE void core_util_atomic_store_s8 | ( | volatile int8_t * | valuePtr, |
int8_t | desiredValue | ||
) |
Definition at line 725 of file mbed_atomic_impl.h.
MBED_FORCEINLINE void core_util_atomic_store_explicit_s8 | ( | volatile int8_t * | valuePtr, |
int8_t | desiredValue, | ||
mbed_memory_order | order | ||
) |
Definition at line 725 of file mbed_atomic_impl.h.
MBED_FORCEINLINE void core_util_atomic_store_s16 | ( | volatile int16_t * | valuePtr, |
int16_t | desiredValue | ||
) |
Definition at line 726 of file mbed_atomic_impl.h.
MBED_FORCEINLINE void core_util_atomic_store_explicit_s16 | ( | volatile int16_t * | valuePtr, |
int16_t | desiredValue, | ||
mbed_memory_order | order | ||
) |
Definition at line 726 of file mbed_atomic_impl.h.
MBED_FORCEINLINE void core_util_atomic_store_s32 | ( | volatile int32_t * | valuePtr, |
int32_t | desiredValue | ||
) |
Definition at line 727 of file mbed_atomic_impl.h.
MBED_FORCEINLINE void core_util_atomic_store_explicit_s32 | ( | volatile int32_t * | valuePtr, |
int32_t | desiredValue, | ||
mbed_memory_order | order | ||
) |
Definition at line 727 of file mbed_atomic_impl.h.
MBED_FORCEINLINE void core_util_atomic_store_s64 | ( | volatile int64_t * | valuePtr, |
int64_t | desiredValue | ||
) |
Definition at line 742 of file mbed_atomic_impl.h.
MBED_FORCEINLINE void core_util_atomic_store_explicit_s64 | ( | volatile int64_t * | valuePtr, |
int64_t | desiredValue, | ||
mbed_memory_order | order | ||
) |
MBED_FORCEINLINE void core_util_atomic_store_bool | ( | volatile bool * | valuePtr, |
bool | desiredValue | ||
) |
Definition at line 728 of file mbed_atomic_impl.h.
MBED_FORCEINLINE void core_util_atomic_store_explicit_bool | ( | volatile bool * | valuePtr, |
bool | desiredValue, | ||
mbed_memory_order | order | ||
) |
Definition at line 728 of file mbed_atomic_impl.h.
MBED_FORCEINLINE void core_util_atomic_store_ptr | ( | void *volatile * | valuePtr, |
void * | desiredValue | ||
) |
Definition at line 729 of file mbed_atomic_impl.h.
MBED_FORCEINLINE void core_util_atomic_store_explicit_ptr | ( | void *volatile * | valuePtr, |
void * | desiredValue, | ||
mbed_memory_order | order | ||
) |
Definition at line 729 of file mbed_atomic_impl.h.
uint8_t core_util_atomic_exchange_u8 | ( | volatile uint8_t * | valuePtr, |
uint8_t | desiredValue | ||
) |
Atomic exchange.
valuePtr | Target memory location. |
desiredValue | The value to store. |
MBED_FORCEINLINE uint8_t core_util_atomic_exchange_explicit_u8 | ( | volatile uint8_t * | valuePtr, |
uint8_t | desiredValue, | ||
mbed_memory_order | order | ||
) |
Atomic exchange.
valuePtr | Target memory location. |
desiredValue | The value to store. |
order | memory ordering constraint |
uint16_t core_util_atomic_exchange_u16 | ( | volatile uint16_t * | valuePtr, |
uint16_t | desiredValue | ||
) |
Atomic exchange.
valuePtr | Target memory location. |
desiredValue | The value to store. |
MBED_FORCEINLINE uint16_t core_util_atomic_exchange_explicit_u16 | ( | volatile uint16_t * | valuePtr, |
uint16_t | desiredValue, | ||
mbed_memory_order | order | ||
) |
uint32_t core_util_atomic_exchange_u32 | ( | volatile uint32_t * | valuePtr, |
uint32_t | desiredValue | ||
) |
Atomic exchange.
valuePtr | Target memory location. |
desiredValue | The value to store. |
MBED_FORCEINLINE uint32_t core_util_atomic_exchange_explicit_u32 | ( | volatile uint32_t * | valuePtr, |
uint32_t | desiredValue, | ||
mbed_memory_order | order | ||
) |
uint64_t core_util_atomic_exchange_u64 | ( | volatile uint64_t * | valuePtr, |
uint64_t | desiredValue | ||
) |
Atomic exchange.
valuePtr | Target memory location. |
desiredValue | The value to store. |
MBED_FORCEINLINE uint64_t core_util_atomic_exchange_explicit_u64 | ( | volatile uint64_t * | valuePtr, |
uint64_t | desiredValue, | ||
mbed_memory_order | order | ||
) |
MBED_FORCEINLINE int8_t core_util_atomic_exchange_s8 | ( | volatile int8_t * | valuePtr, |
int8_t | desiredValue | ||
) |
Atomic exchange.
valuePtr | Target memory location. |
desiredValue | The value to store. |
Definition at line 880 of file mbed_atomic_impl.h.
MBED_FORCEINLINE int8_t core_util_atomic_exchange_explicit_s8 | ( | volatile int8_t * | valuePtr, |
int8_t | desiredValue, | ||
mbed_memory_order | order | ||
) |
Definition at line 886 of file mbed_atomic_impl.h.
MBED_FORCEINLINE int16_t core_util_atomic_exchange_s16 | ( | volatile int16_t * | valuePtr, |
int16_t | desiredValue | ||
) |
Atomic exchange.
valuePtr | Target memory location. |
desiredValue | The value to store. |
Definition at line 880 of file mbed_atomic_impl.h.
MBED_FORCEINLINE int16_t core_util_atomic_exchange_explicit_s16 | ( | volatile int16_t * | valuePtr, |
int16_t | desiredValue, | ||
mbed_memory_order | order | ||
) |
Definition at line 886 of file mbed_atomic_impl.h.
MBED_FORCEINLINE int32_t core_util_atomic_exchange_s32 | ( | volatile int32_t * | valuePtr, |
int32_t | desiredValue | ||
) |
Atomic exchange.
valuePtr | Target memory location. |
desiredValue | The value to store. |
Definition at line 880 of file mbed_atomic_impl.h.
MBED_FORCEINLINE int32_t core_util_atomic_exchange_explicit_s32 | ( | volatile int32_t * | valuePtr, |
int32_t | desiredValue, | ||
mbed_memory_order | order | ||
) |
Definition at line 886 of file mbed_atomic_impl.h.
MBED_FORCEINLINE int64_t core_util_atomic_exchange_s64 | ( | volatile int64_t * | valuePtr, |
int64_t | desiredValue | ||
) |
Atomic exchange.
valuePtr | Target memory location. |
desiredValue | The value to store. |
Definition at line 880 of file mbed_atomic_impl.h.
MBED_FORCEINLINE int64_t core_util_atomic_exchange_explicit_s64 | ( | volatile int64_t * | valuePtr, |
int64_t | desiredValue, | ||
mbed_memory_order | order | ||
) |
Definition at line 886 of file mbed_atomic_impl.h.
MBED_FORCEINLINE bool core_util_atomic_exchange_bool | ( | volatile bool * | valuePtr, |
bool | desiredValue | ||
) |
Atomic exchange.
valuePtr | Target memory location. |
desiredValue | The value to store. |
Definition at line 890 of file mbed_atomic_impl.h.
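Exchange returns the previous value, which makes a minimal try-lock easy to sketch. Note that core_util_atomic_flag is the purpose-built primitive for this; the names below are illustrative and the include path is assumed.
    #include <stdbool.h>
    #include "mbed_atomic.h"   /* assumed include path */

    static volatile bool lock_taken;

    /* Exchanging 'true' returns the old value: 'false' means we took the lock. */
    static bool try_lock(void)
    {
        return !core_util_atomic_exchange_bool(&lock_taken, true);
    }

    static void unlock(void)
    {
        core_util_atomic_store_bool(&lock_taken, false);
    }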
MBED_FORCEINLINE bool core_util_atomic_exchange_explicit_bool | ( | volatile bool * | valuePtr, |
bool | desiredValue, | ||
mbed_memory_order | order | ||
) |
Definition at line 895 of file mbed_atomic_impl.h.
void * core_util_atomic_exchange_ptr | ( | void *volatile * | valuePtr, |
void * | desiredValue | ||
) |
Atomic exchange.
valuePtr | Target memory location. |
desiredValue | The value to store. |
Definition at line 900 of file mbed_atomic_impl.h.
MBED_FORCEINLINE void * core_util_atomic_exchange_explicit_ptr | ( | void *volatile * | valuePtr, |
void * | desiredValue, | ||
mbed_memory_order | order | ||
) |
Definition at line 909 of file mbed_atomic_impl.h.
uint8_t core_util_atomic_incr_u8 | ( | volatile uint8_t * | valuePtr, |
uint8_t | delta | ||
) |
Atomic increment.
valuePtr | Target memory location being incremented. |
delta | The amount being incremented. |
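The increment functions return the new value after the addition, which suits reference counting. A hypothetical sketch (object type, names and include path are assumptions):
    #include <stdint.h>
    #include "mbed_atomic.h"   /* assumed include path */

    struct ref_obj {                   /* hypothetical refcounted object */
        volatile uint32_t refs;
    };

    static void ref_obj_get(struct ref_obj *o)
    {
        (void)core_util_atomic_incr_u32(&o->refs, 1);   /* returns the new count */
    }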
uint16_t core_util_atomic_incr_u16 | ( | volatile uint16_t * | valuePtr, |
uint16_t | delta | ||
) |
Atomic increment.
valuePtr | Target memory location being incremented. |
delta | The amount being incremented. |
uint32_t core_util_atomic_incr_u32 | ( | volatile uint32_t * | valuePtr, |
uint32_t | delta | ||
) |
Atomic increment.
valuePtr | Target memory location being incremented. |
delta | The amount being incremented. |
uint64_t core_util_atomic_incr_u64 | ( | volatile uint64_t * | valuePtr, |
uint64_t | delta | ||
) |
Atomic increment.
valuePtr | Target memory location being incremented. |
delta | The amount being incremented. |
MBED_FORCEINLINE int8_t core_util_atomic_incr_s8 | ( | volatile int8_t * | valuePtr, |
int8_t | delta | ||
) |
Atomic increment.
valuePtr | Target memory location being incremented. |
delta | The amount being incremented. |
Definition at line 881 of file mbed_atomic_impl.h.
MBED_FORCEINLINE int16_t core_util_atomic_incr_s16 | ( | volatile int16_t * | valuePtr, |
int16_t | delta | ||
) |
Atomic increment.
valuePtr | Target memory location being incremented. |
delta | The amount being incremented. |
Definition at line 881 of file mbed_atomic_impl.h.
MBED_FORCEINLINE int32_t core_util_atomic_incr_s32 | ( | volatile int32_t * | valuePtr, |
int32_t | delta | ||
) |
Atomic increment.
valuePtr | Target memory location being incremented. |
delta | The amount being incremented. |
Definition at line 881 of file mbed_atomic_impl.h.
MBED_FORCEINLINE int64_t core_util_atomic_incr_s64 | ( | volatile int64_t * | valuePtr, |
int64_t | delta | ||
) |
Atomic increment.
valuePtr | Target memory location being incremented. |
delta | The amount being incremented. |
Definition at line 881 of file mbed_atomic_impl.h.
void * core_util_atomic_incr_ptr | ( | void *volatile * | valuePtr, |
ptrdiff_t | delta | ||
) |
Atomic increment.
valuePtr | Target memory location being incremented. |
delta | The amount being incremented. |
Definition at line 918 of file mbed_atomic_impl.h.
uint8_t core_util_atomic_decr_u8 | ( | volatile uint8_t * | valuePtr, |
uint8_t | delta | ||
) |
Atomic decrement.
valuePtr | Target memory location being decremented. |
delta | The amount being decremented. |
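Continuing the ref_obj sketch from the increment section above, decrement also returns the new value, so exactly one caller observes the count reaching zero; release_ref_obj() is a hypothetical cleanup function.
    static void ref_obj_put(struct ref_obj *o)
    {
        if (core_util_atomic_decr_u32(&o->refs, 1) == 0) {
            release_ref_obj(o);   /* hypothetical: free or clean up the object */
        }
    }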
uint16_t core_util_atomic_decr_u16 | ( | volatile uint16_t * | valuePtr, |
uint16_t | delta | ||
) |
Atomic decrement.
valuePtr | Target memory location being decremented. |
delta | The amount being decremented. |
uint32_t core_util_atomic_decr_u32 | ( | volatile uint32_t * | valuePtr, |
uint32_t | delta | ||
) |
Atomic decrement.
valuePtr | Target memory location being decremented. |
delta | The amount being decremented. |
uint64_t core_util_atomic_decr_u64 | ( | volatile uint64_t * | valuePtr, |
uint64_t | delta | ||
) |
Atomic decrement.
valuePtr | Target memory location being decremented. |
delta | The amount being decremented. |
MBED_FORCEINLINE int8_t core_util_atomic_decr_s8 | ( | volatile int8_t * | valuePtr, |
int8_t | delta | ||
) |
Atomic decrement.
valuePtr | Target memory location being decremented. |
delta | The amount being decremented. |
Definition at line 882 of file mbed_atomic_impl.h.
MBED_FORCEINLINE int16_t core_util_atomic_decr_s16 | ( | volatile int16_t * | valuePtr, |
int16_t | delta | ||
) |
Atomic decrement.
valuePtr | Target memory location being decremented. |
delta | The amount being decremented. |
Definition at line 882 of file mbed_atomic_impl.h.
MBED_FORCEINLINE int32_t core_util_atomic_decr_s32 | ( | volatile int32_t * | valuePtr, |
int32_t | delta | ||
) |
Atomic decrement.
valuePtr | Target memory location being decremented. |
delta | The amount being decremented. |
Definition at line 882 of file mbed_atomic_impl.h.
MBED_FORCEINLINE int64_t core_util_atomic_decr_s64 | ( | volatile int64_t * | valuePtr, |
int64_t | delta | ||
) |
Atomic decrement.
valuePtr | Target memory location being decremented. |
delta | The amount being decremented. |
Definition at line 882 of file mbed_atomic_impl.h.
void * core_util_atomic_decr_ptr | ( | void *volatile * | valuePtr, |
ptrdiff_t | delta | ||
) |
Atomic decrement.
valuePtr | Target memory location being decremented. |
delta | The amount being decremented. |
Definition at line 927 of file mbed_atomic_impl.h.
uint8_t core_util_atomic_fetch_add_u8 | ( | volatile uint8_t * | valuePtr, |
uint8_t | arg | ||
) |
Atomic add.
valuePtr | Target memory location being modified. |
arg | The argument for the addition. |
MBED_FORCEINLINE uint8_t core_util_atomic_fetch_add_explicit_u8 | ( | volatile uint8_t * | valuePtr, |
uint8_t | arg, | ||
mbed_memory_order | order | ||
) |
Atomic add.
valuePtr | Target memory location being modified. |
arg | The argument for the addition. |
order | memory ordering constraint |
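Unlike the increment functions, fetch_add returns the value held before the addition, which is handy for handing out unique indices. A hypothetical sketch (buffer size, names and include path are illustrative):
    #include <stdint.h>
    #include "mbed_atomic.h"   /* assumed include path */

    #define LOG_SLOTS 64u                    /* illustrative buffer size */
    static uint32_t log_entries[LOG_SLOTS];
    static volatile uint32_t next_slot;

    /* fetch_add returns the value *before* the addition, so each caller
     * gets a distinct slot index even under concurrent logging. */
    static uint32_t *claim_log_slot(void)
    {
        uint32_t idx = core_util_atomic_fetch_add_u32(&next_slot, 1);
        return &log_entries[idx % LOG_SLOTS];
    }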
uint16_t core_util_atomic_fetch_add_u16 | ( | volatile uint16_t * | valuePtr, |
uint16_t | arg | ||
) |
Atomic add.
valuePtr | Target memory location being modified. |
arg | The argument for the addition. |
MBED_FORCEINLINE uint16_t core_util_atomic_fetch_add_explicit_u16 | ( | volatile uint16_t * | valuePtr, |
uint16_t | arg, | ||
mbed_memory_order | order | ||
) |
uint32_t core_util_atomic_fetch_add_u32 | ( | volatile uint32_t * | valuePtr, |
uint32_t | arg | ||
) |
Atomic add.
valuePtr | Target memory location being modified. |
arg | The argument for the addition. |
MBED_FORCEINLINE uint32_t core_util_atomic_fetch_add_explicit_u32 | ( | volatile uint32_t * | valuePtr, |
uint32_t | arg, | ||
mbed_memory_order | order | ||
) |
uint64_t core_util_atomic_fetch_add_u64 | ( | volatile uint64_t * | valuePtr, |
uint64_t | arg | ||
) |
Atomic add.
valuePtr | Target memory location being modified. |
arg | The argument for the addition. |
MBED_FORCEINLINE uint64_t core_util_atomic_fetch_add_explicit_u64 | ( | volatile uint64_t * | valuePtr, |
uint64_t | arg, | ||
mbed_memory_order | order | ||
) |
MBED_FORCEINLINE int8_t core_util_atomic_fetch_add_s8 | ( | volatile int8_t * | valuePtr, |
int8_t | arg | ||
) |
Atomic add.
valuePtr | Target memory location being modified. |
arg | The argument for the addition. |
Definition at line 883 of file mbed_atomic_impl.h.
MBED_FORCEINLINE int8_t core_util_atomic_fetch_add_explicit_s8 | ( | volatile int8_t * | valuePtr, |
int8_t | arg, | ||
mbed_memory_order | order | ||
) |
Definition at line 887 of file mbed_atomic_impl.h.
MBED_FORCEINLINE int16_t core_util_atomic_fetch_add_s16 | ( | volatile int16_t * | valuePtr, |
int16_t | arg | ||
) |
Atomic add.
valuePtr | Target memory location being modified. |
arg | The argument for the addition. |
Definition at line 883 of file mbed_atomic_impl.h.
MBED_FORCEINLINE int16_t core_util_atomic_fetch_add_explicit_s16 | ( | volatile int16_t * | valuePtr, |
int16_t | arg, | ||
mbed_memory_order | order | ||
) |
Definition at line 887 of file mbed_atomic_impl.h.
MBED_FORCEINLINE int32_t core_util_atomic_fetch_add_s32 | ( | volatile int32_t * | valuePtr, |
int32_t | arg | ||
) |
Atomic add.
valuePtr | Target memory location being modified. |
arg | The argument for the addition. |
Definition at line 883 of file mbed_atomic_impl.h.
MBED_FORCEINLINE int32_t core_util_atomic_fetch_add_explicit_s32 | ( | volatile int32_t * | valuePtr, |
int32_t | arg, | ||
mbed_memory_order | order | ||
) |
Definition at line 887 of file mbed_atomic_impl.h.
MBED_FORCEINLINE int64_t core_util_atomic_fetch_add_s64 | ( | volatile int64_t * | valuePtr, |
int64_t | arg | ||
) |
Atomic add.
valuePtr | Target memory location being modified. |
arg | The argument for the addition. |
Definition at line 883 of file mbed_atomic_impl.h.
MBED_FORCEINLINE int64_t core_util_atomic_fetch_add_explicit_s64 | ( | volatile int64_t * | valuePtr, |
int64_t | arg, | ||
mbed_memory_order | order | ||
) |
Definition at line 887 of file mbed_atomic_impl.h.
MBED_FORCEINLINE void * core_util_atomic_fetch_add_ptr | ( | void *volatile * | valuePtr, |
ptrdiff_t | arg | ||
) |
Atomic add.
valuePtr | Target memory location being modified. |
arg | The argument for the addition. |
Definition at line 936 of file mbed_atomic_impl.h.
MBED_FORCEINLINE void * core_util_atomic_fetch_add_explicit_ptr | ( | void *volatile * | valuePtr, |
ptrdiff_t | arg, | ||
mbed_memory_order | order | ||
) |
Definition at line 945 of file mbed_atomic_impl.h.
uint8_t core_util_atomic_fetch_sub_u8 | ( | volatile uint8_t * | valuePtr, |
uint8_t | arg | ||
) |
Atomic subtract.
valuePtr | Target memory location being modified. |
arg | The argument for the subtraction. |
MBED_FORCEINLINE uint8_t core_util_atomic_fetch_sub_explicit_u8 | ( | volatile uint8_t * | valuePtr, |
uint8_t | arg, | ||
mbed_memory_order | order | ||
) |
Atomic subtract.
valuePtr | Target memory location being modified. |
arg | The argument for the subtraction. |
order | memory ordering constraint |
uint16_t core_util_atomic_fetch_sub_u16 | ( | volatile uint16_t * | valuePtr, |
uint16_t | arg | ||
) |
Atomic subtract.
valuePtr | Target memory location being modified. |
arg | The argument for the subtraction. |
MBED_FORCEINLINE uint16_t core_util_atomic_fetch_sub_explicit_u16 | ( | volatile uint16_t * | valuePtr, |
uint16_t | arg, | ||
mbed_memory_order | order | ||
) |
uint32_t core_util_atomic_fetch_sub_u32 | ( | volatile uint32_t * | valuePtr, |
uint32_t | arg | ||
) |
Atomic subtract.
valuePtr | Target memory location being modified. |
arg | The argument for the subtraction. |
MBED_FORCEINLINE uint32_t core_util_atomic_fetch_sub_explicit_u32 | ( | volatile uint32_t * | valuePtr, |
uint32_t | arg, | ||
mbed_memory_order | order | ||
) |
uint64_t core_util_atomic_fetch_sub_u64 | ( | volatile uint64_t * | valuePtr, |
uint64_t | arg | ||
) |
Atomic subtract.
valuePtr | Target memory location being modified. |
arg | The argument for the subtraction. |
MBED_FORCEINLINE uint64_t core_util_atomic_fetch_sub_explicit_u64 | ( | volatile uint64_t * | valuePtr, |
uint64_t | arg, | ||
mbed_memory_order | order | ||
) |
MBED_FORCEINLINE int8_t core_util_atomic_fetch_sub_s8 | ( | volatile int8_t * | valuePtr, |
int8_t | arg | ||
) |
Atomic subtract.
valuePtr | Target memory location being modified. |
arg | The argument for the subtraction. |
Definition at line 884 of file mbed_atomic_impl.h.
MBED_FORCEINLINE int8_t core_util_atomic_fetch_sub_explicit_s8 | ( | volatile int8_t * | valuePtr, |
int8_t | arg, | ||
mbed_memory_order | order | ||
) |
Definition at line 888 of file mbed_atomic_impl.h.
MBED_FORCEINLINE int16_t core_util_atomic_fetch_sub_s16 | ( | volatile int16_t * | valuePtr, |
int16_t | arg | ||
) |
Atomic subtract.
valuePtr | Target memory location being modified. |
arg | The argument for the subtraction. |
Definition at line 884 of file mbed_atomic_impl.h.
MBED_FORCEINLINE int16_t core_util_atomic_fetch_sub_explicit_s16 | ( | volatile int16_t * | valuePtr, |
int16_t | arg, | ||
mbed_memory_order | order | ||
) |
Definition at line 888 of file mbed_atomic_impl.h.
MBED_FORCEINLINE int32_t core_util_atomic_fetch_sub_s32 | ( | volatile int32_t * | valuePtr, |
int32_t | arg | ||
) |
Atomic subtract.
valuePtr | Target memory location being modified. |
arg | The argument for the subtraction. |
Definition at line 884 of file mbed_atomic_impl.h.
MBED_FORCEINLINE int32_t core_util_atomic_fetch_sub_explicit_s32 | ( | volatile int32_t * | valuePtr, |
int32_t | arg, | ||
mbed_memory_order | order | ||
) |
Definition at line 888 of file mbed_atomic_impl.h.
MBED_FORCEINLINE int64_t core_util_atomic_fetch_sub_s64 | ( | volatile int64_t * | valuePtr, |
int64_t | arg | ||
) |
Atomic subtract.
valuePtr | Target memory location being modified. |
arg | The argument for the subtraction. |
Definition at line 884 of file mbed_atomic_impl.h.
MBED_FORCEINLINE int64_t core_util_atomic_fetch_sub_explicit_s64 | ( | volatile int64_t * | valuePtr, |
int64_t | arg, | ||
mbed_memory_order | order | ||
) |
Definition at line 888 of file mbed_atomic_impl.h.
MBED_FORCEINLINE void * core_util_atomic_fetch_sub_ptr | ( | void *volatile * | valuePtr, |
ptrdiff_t | arg | ||
) |
Atomic subtract.
valuePtr | Target memory location being modified. |
arg | The argument for the subtraction. |
Definition at line 954 of file mbed_atomic_impl.h.
MBED_FORCEINLINE void * core_util_atomic_fetch_sub_explicit_ptr | ( | void *volatile * | valuePtr, |
ptrdiff_t | arg, | ||
mbed_memory_order | order | ||
) |
Definition at line 963 of file mbed_atomic_impl.h.
uint8_t core_util_atomic_fetch_and_u8 | ( | volatile uint8_t * | valuePtr, |
uint8_t | arg | ||
) |
Atomic bitwise and.
valuePtr | Target memory location being modified. |
arg | The argument for the bitwise operation. |
MBED_FORCEINLINE uint8_t core_util_atomic_fetch_and_explicit_u8 | ( | volatile uint8_t * | valuePtr, |
uint8_t | arg, | ||
mbed_memory_order | order | ||
) |
Atomic bitwise and.
valuePtr | Target memory location being modified. |
arg | The argument for the bitwise operation. |
order | memory ordering constraint |
uint16_t core_util_atomic_fetch_and_u16 | ( | volatile uint16_t * | valuePtr, |
uint16_t | arg | ||
) |
Atomic bitwise and.
valuePtr | Target memory location being modified. |
arg | The argument for the bitwise operation. |
MBED_FORCEINLINE uint16_t core_util_atomic_fetch_and_explicit_u16 | ( | volatile uint16_t * | valuePtr, |
uint16_t | arg, | ||
mbed_memory_order | order | ||
) |
uint32_t core_util_atomic_fetch_and_u32 | ( | volatile uint32_t * | valuePtr, |
uint32_t | arg | ||
) |
Atomic bitwise and.
valuePtr | Target memory location being modified. |
arg | The argument for the bitwise operation. |
MBED_FORCEINLINE uint32_t core_util_atomic_fetch_and_explicit_u32 | ( | volatile uint32_t * | valuePtr, |
uint32_t | arg, | ||
mbed_memory_order | order | ||
) |
uint64_t core_util_atomic_fetch_and_u64 | ( | volatile uint64_t * | valuePtr, |
uint64_t | arg | ||
) |
Atomic bitwise and.
valuePtr | Target memory location being modified. |
arg | The argument for the bitwise operation. |
MBED_FORCEINLINE uint64_t core_util_atomic_fetch_and_explicit_u64 | ( | volatile uint64_t * | valuePtr, |
uint64_t | arg, | ||
mbed_memory_order | order | ||
) |
uint8_t core_util_atomic_fetch_or_u8 | ( | volatile uint8_t * | valuePtr, |
uint8_t | arg | ||
) |
Atomic bitwise inclusive or.
valuePtr | Target memory location being modified. |
arg | The argument for the bitwise operation. |
MBED_FORCEINLINE uint8_t core_util_atomic_fetch_or_explicit_u8 | ( | volatile uint8_t * | valuePtr, |
uint8_t | arg, | ||
mbed_memory_order | order | ||
) |
Atomic bitwise inclusive or.
valuePtr | Target memory location being modified. |
arg | The argument for the bitwise operation. |
order | memory ordering constraint |
uint16_t core_util_atomic_fetch_or_u16 | ( | volatile uint16_t * | valuePtr, |
uint16_t | arg | ||
) |
Atomic bitwise inclusive or.
valuePtr | Target memory location being modified. |
arg | The argument for the bitwise operation. |
MBED_FORCEINLINE uint16_t core_util_atomic_fetch_or_explicit_u16 | ( | volatile uint16_t * | valuePtr, |
uint16_t | arg, | ||
mbed_memory_order | order | ||
) |
uint32_t core_util_atomic_fetch_or_u32 | ( | volatile uint32_t * | valuePtr, |
uint32_t | arg | ||
) |
Atomic bitwise inclusive or.
valuePtr | Target memory location being modified. |
arg | The argument for the bitwise operation. |
MBED_FORCEINLINE uint32_t core_util_atomic_fetch_or_explicit_u32 | ( | volatile uint32_t * | valuePtr, |
uint32_t | arg, | ||
mbed_memory_order | order | ||
) |
uint64_t core_util_atomic_fetch_or_u64 | ( | volatile uint64_t * | valuePtr, |
uint64_t | arg | ||
) |
Atomic bitwise inclusive or.
valuePtr | Target memory location being modified. |
arg | The argument for the bitwise operation. |
MBED_FORCEINLINE uint64_t core_util_atomic_fetch_or_explicit_u64 | ( | volatile uint64_t * | valuePtr, |
uint64_t | arg, | ||
mbed_memory_order | order | ||
) |
uint8_t core_util_atomic_fetch_xor_u8 | ( | volatile uint8_t * | valuePtr, |
uint8_t | arg | ||
) |
Atomic bitwise exclusive or.
valuePtr | Target memory location being modified. |
arg | The argument for the bitwise operation. |
MBED_FORCEINLINE uint8_t core_util_atomic_fetch_xor_explicit_u8 | ( | volatile uint8_t * | valuePtr, |
uint8_t | arg, | ||
mbed_memory_order | order | ||
) |
Atomic bitwise exclusive or.
valuePtr | Target memory location being modified. |
arg | The argument for the bitwise operation. |
order | memory ordering constraint |
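The bitwise fetch functions each return the word's previous value, which is enough to build simple event-flag helpers. The sketch below is illustrative; the event bits, names and include path are assumptions.
    #include <stdint.h>
    #include <stdbool.h>
    #include "mbed_atomic.h"   /* assumed include path */

    #define EVT_RX   (1u << 0)               /* illustrative event bits */
    #define EVT_TX   (1u << 1)

    static volatile uint32_t event_flags;

    /* fetch_or sets bits, fetch_and clears them, fetch_xor toggles them;
     * each returns the value the word held before the modification. */
    static void post_event(uint32_t evt)
    {
        (void)core_util_atomic_fetch_or_u32(&event_flags, evt);
    }

    static bool take_event(uint32_t evt)
    {
        uint32_t before = core_util_atomic_fetch_and_u32(&event_flags, ~evt);
        return (before & evt) != 0;          /* true if the bit was set */
    }

    static void toggle_event(uint32_t evt)
    {
        (void)core_util_atomic_fetch_xor_u32(&event_flags, evt);
    }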
uint16_t core_util_atomic_fetch_xor_u16 | ( | volatile uint16_t * | valuePtr, |
uint16_t | arg | ||
) |
Atomic bitwise exclusive or.
valuePtr | Target memory location being modified. |
arg | The argument for the bitwise operation. |
MBED_FORCEINLINE uint16_t core_util_atomic_fetch_xor_explicit_u16 | ( | volatile uint16_t * | valuePtr, |
uint16_t | arg, | ||
mbed_memory_order | order | ||
) |
uint32_t core_util_atomic_fetch_xor_u32 | ( | volatile uint32_t * | valuePtr, |
uint32_t | arg | ||
) |
Atomic bitwise exclusive or.
valuePtr | Target memory location being modified. |
arg | The argument for the bitwise operation. |
MBED_FORCEINLINE uint32_t core_util_atomic_fetch_xor_explicit_u32 | ( | volatile uint32_t * | valuePtr, |
uint32_t | arg, | ||
mbed_memory_order | order | ||
) |
uint64_t core_util_atomic_fetch_xor_u64 | ( | volatile uint64_t * | valuePtr, |
uint64_t | arg | ||
) |
Atomic bitwise exclusive or.
valuePtr | Target memory location being modified. |
arg | The argument for the bitwise operation. |
MBED_FORCEINLINE uint64_t core_util_atomic_fetch_xor_explicit_u64 | ( | volatile uint64_t * | valuePtr, |
uint64_t | arg, | ||
mbed_memory_order | order | ||
) |