#include <amino/util.h>
Defines

    #define _ATOMIC_LOAD_(__a__, __x__)
    #define _ATOMIC_STORE_(__a__, __m__, __x__)
    #define _ATOMIC_FENCE_(__a__, __x__)
    #define _ATOMIC_CMPSWP_(__a__, __e__, __m__, __x__)
    #define _ATOMIC_MODIFY_(__a__, __o__, __m__, __x__)
    #define ATOMIC_INTEGRAL_LOCK_FREE 2
    #define ATOMIC_ADDRESS_LOCK_FREE 2
Functions

    bool atomic_flag_test_and_set_explicit(volatile atomic_flag *__a__, memory_order __x__)
    bool atomic_flag_test_and_set(volatile atomic_flag *__a__)
    void atomic_flag_clear_explicit(volatile atomic_flag *__a__, memory_order __x__)
    void atomic_flag_clear(volatile atomic_flag *__a__)
    void atomic_flag_fence(const volatile atomic_flag *__a__, memory_order __x__)
#define _ATOMIC_CMPSWP_(__a__, __e__, __m__, __x__)
Value:
({ \
__typeof__(__m__) __v__ = (__m__); \
bool __r__; \
__r__ = cas(__a__, __e__, (unsigned long)__v__, sizeof(__v__)); \
__r__; })
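The macro evaluates to true when the cas helper succeeds in swapping the value at __a__ for __m__, comparing against the expected value pointed to by __e__; the memory-order argument __x__ is not consulted by this expansion. A minimal retry-loop sketch, assuming a demo_atomic_int type whose single __f__ member mirrors the layout the macros dereference (the demo_* names are illustrations, not part of the header):

    /* Hypothetical atomic type: one __f__ field, as the macros expect. */
    typedef struct { volatile int __f__; } demo_atomic_int;

    /* Atomically increment *a; returns the value seen before the add. */
    int demo_increment(demo_atomic_int *a)
    {
        int old;
        do {
            old = a->__f__;   /* snapshot the current value */
            /* retry if another thread changed *a between snapshot and CAS */
        } while (!_ATOMIC_CMPSWP_(a, &old, old + 1, memory_order_seq_cst));
        return old;
    }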
#define _ATOMIC_FENCE_(__a__, __x__)
Value:
({ if(__x__ == memory_order_acquire || __x__ == memory_order_release) \
compiler_barrier(); \
else if(__x__ == memory_order_seq_cst) \
SC_FENCE; \
else if(__x__ == memory_order_acq_rel) \
CC_FENCE; \
})
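The expansion maps memory orders onto barriers: acquire and release cost only a compiler barrier, while seq_cst and acq_rel emit the SC_FENCE and CC_FENCE hardware fences defined elsewhere in amino. Note that the __a__ argument is ignored. A short sketch of the mapping (demo_fences is an illustrative name):

    void demo_fences(volatile atomic_flag *flag)
    {
        _ATOMIC_FENCE_(flag, memory_order_release); /* compiler barrier only */
        _ATOMIC_FENCE_(flag, memory_order_acq_rel); /* CC_FENCE */
        _ATOMIC_FENCE_(flag, memory_order_seq_cst); /* SC_FENCE, a full barrier */
    }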
#define _ATOMIC_LOAD_(__a__, __x__)
Value:
({\
if(__x__ != memory_order_relaxed) \
compiler_barrier();\
__typeof__((__a__)->__f__) __r__ = (__a__)->__f__; \
__r__; })
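Any order stronger than memory_order_relaxed inserts a compiler barrier before the field is read. A consumer-side sketch, reusing the hypothetical demo_atomic_int type from the _ATOMIC_CMPSWP_ example above:

    /* Spin until *ready becomes nonzero, then read the payload. */
    int demo_wait_and_read(demo_atomic_int *ready, demo_atomic_int *data)
    {
        while (_ATOMIC_LOAD_(ready, memory_order_acquire) == 0)
            ;
        return _ATOMIC_LOAD_(data, memory_order_relaxed);
    }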
#define _ATOMIC_MODIFY_(__a__, __o__, __m__, __x__)
Value:
({ volatile __typeof__((__a__)->__f__)* __p__ = &((__a__)->__f__); \
__typeof__(__m__) __oldValue__; \
__typeof__((__a__)->__f__) __newValue__; \
do { \
__oldValue__ = *__p__; \
__newValue__ = __oldValue__; \
switch(__o__){\
case ops_swap:\
__newValue__ = __m__; \
break; \
case ops_add:\
__newValue__ += __m__; \
break; \
case ops_sub: \
__newValue__ -= __m__; \
break; \
case ops_and:\
__newValue__ &= __m__; \
break; \
case ops_or:\
__newValue__ |= __m__; \
break; \
case ops_xor:\
__newValue__ ^= __m__; \
break; \
}\
} while(!cas(__a__, &__oldValue__, (unsigned long)__newValue__, sizeof(__newValue__)));\
__oldValue__; })
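The macro is a CAS retry loop that applies one of the ops_* operations (swap, add, sub, and, or, xor) and evaluates to the value observed before the modification. A fetch-and-add sketch on the hypothetical demo_atomic_int type from above:

    /* Atomically add delta to a->__f__; returns the pre-add value. */
    int demo_fetch_add(demo_atomic_int *a, int delta)
    {
        return _ATOMIC_MODIFY_(a, ops_add, delta, memory_order_seq_cst);
    }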
#define _ATOMIC_STORE_(__a__, __m__, __x__)
Value:
({\
if(__x__ != memory_order_relaxed) \
compiler_barrier();\
if(__x__ == memory_order_seq_cst)\
__asm__ __volatile(\
"XCHG %0, %1"\
:"=q"(__a__->__f__)\
:"m"(__m__)\
);\
else\
(__a__)->__f__ = __m__; \
__m__; })
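Any order stronger than relaxed first issues a compiler barrier; memory_order_seq_cst additionally performs the store with an XCHG instruction, whose implicit lock semantics act as a full barrier on x86, while weaker orders use a plain assignment. The macro evaluates to __m__. A sketch on the hypothetical demo_atomic_int type from above:

    void demo_store(demo_atomic_int *a, int v)
    {
        _ATOMIC_STORE_(a, v, memory_order_release); /* barrier + plain store */
        _ATOMIC_STORE_(a, v, memory_order_seq_cst); /* XCHG, full barrier on x86 */
    }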
#define ATOMIC_ADDRESS_LOCK_FREE 2

#define ATOMIC_INTEGRAL_LOCK_FREE 2

Both constants follow the C++0x atomics convention (0 = never lock-free, 1 = sometimes, 2 = always), so the value 2 declares the address and integral atomic operations always lock-free.
void atomic_flag_clear(volatile atomic_flag *__a__)

void atomic_flag_clear_explicit(volatile atomic_flag *__a__, memory_order __x__)

void atomic_flag_fence(const volatile atomic_flag *__a__, memory_order __x__)

bool atomic_flag_test_and_set(volatile atomic_flag *__a__)

bool atomic_flag_test_and_set_explicit(volatile atomic_flag *__a__, memory_order __x__)
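atomic_flag_test_and_set atomically sets the flag and returns the value it held before, which makes the two *_explicit functions sufficient for a classic spinlock. A minimal sketch, assuming a zero-initialized flag reads as clear (the demo_* names are illustrative):

    static volatile atomic_flag demo_lock; /* assumed zero-initialized = clear */

    void demo_acquire(void)
    {
        /* spin while the flag was already set by another holder */
        while (atomic_flag_test_and_set_explicit(&demo_lock, memory_order_acquire))
            ;
    }

    void demo_release(void)
    {
        atomic_flag_clear_explicit(&demo_lock, memory_order_release);
    }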