00001 
00002 
00003 
00004 
00005 
00006 
00007 
00008 
00009 
00010 
00011 
00012 
00013 
00014 
00015 
00016 
#ifdef __cplusplus
extern "C" {
#endif
    /* Flag/lock primitives supplied by the accompanying runtime
       translation unit.  The rest of this header builds every atomic
       operation on top of these (lock-based emulation).  */
    extern bool atomic_flag_test_and_set( volatile atomic_flag* );
    extern bool atomic_flag_test_and_set_explicit
        ( volatile atomic_flag*, memory_order );
    extern void atomic_flag_clear( volatile atomic_flag* );
    extern void atomic_flag_clear_explicit
        ( volatile atomic_flag*, memory_order );
    extern void atomic_flag_fence
        ( const volatile atomic_flag*, memory_order );
    /* Spin/wait until the flag is acquired (names are implementation
       internals, hence the reserved __ spelling).  */
    extern void __atomic_flag_wait__
        ( volatile atomic_flag* );
    extern void __atomic_flag_wait_explicit__
        ( volatile atomic_flag*, memory_order );
    /* Maps an arbitrary object address to the guard flag that locks it
       (presumably a hash into a flag table — defined elsewhere).
       __attribute__((const)): same address always yields same flag.  */
    extern volatile atomic_flag* __atomic_flag_for_address__
        ( const volatile void* __z__ )
        __attribute__((const));

#ifdef __cplusplus
}
#endif
00039 
#ifdef __cplusplus

/* C++ member-function forms of the atomic_flag operations: each simply
   forwards `this` to the corresponding C-linkage _explicit function
   declared above.  */

inline bool atomic_flag::test_and_set( memory_order __x__ ) volatile
{ return atomic_flag_test_and_set_explicit( this, __x__ ); }

inline void atomic_flag::clear( memory_order __x__ ) volatile
{ atomic_flag_clear_explicit( this, __x__ ); }

inline void atomic_flag::fence( memory_order __x__ ) const volatile
{ atomic_flag_fence( this, __x__ ); }

#endif
00052 
00053 
00054 
/* Atomically load (__a__)->__f__ under the per-address lock (GCC
   statement expression): acquire the guard flag mapped to the field's
   address, copy the value, release the guard, and yield the copy as
   the expression result.  __x__ is the memory_order applied to the
   lock acquire/release.  */
#define _ATOMIC_LOAD_( __a__, __x__ ) \
    ({ volatile __typeof__((__a__)->__f__)* __p__ = &((__a__)->__f__); \
     volatile atomic_flag* __g__ = __atomic_flag_for_address__( __p__ ); \
     __atomic_flag_wait_explicit__( __g__, __x__ ); \
     __typeof__((__a__)->__f__) __r__ = *__p__; \
     atomic_flag_clear_explicit( __g__, __x__ ); \
     __r__; })
00062 
/* Atomically store __m__ into (__a__)->__f__ under the per-address
   lock.  __m__ is evaluated exactly once (into __v__) before the lock
   is taken; the stored value __v__ is yielded as the expression
   result.  */
#define _ATOMIC_STORE_( __a__, __m__, __x__ ) \
    ({ volatile __typeof__((__a__)->__f__)* __p__ = &((__a__)->__f__); \
     __typeof__(__m__) __v__ = (__m__); \
     volatile atomic_flag* __g__ = __atomic_flag_for_address__( __p__ ); \
     __atomic_flag_wait_explicit__( __g__, __x__ ); \
     *__p__ = __v__; \
     atomic_flag_clear_explicit( __g__, __x__ ); \
     __v__; })
00071 
/* Atomic read-modify-write under the per-address lock.  __o__ is a
   compound-assignment operator token (+=, -=, &=, |=, ^=) spliced
   directly into `*__p__ __o__ __v__`.  __m__ is evaluated exactly
   once before the lock is taken.  Yields the value the field held
   BEFORE the modification (fetch-and-op semantics).  */
#define _ATOMIC_MODIFY_( __a__, __o__, __m__, __x__ ) \
    ({ volatile __typeof__((__a__)->__f__)* __p__ = &((__a__)->__f__); \
     __typeof__(__m__) __v__ = (__m__); \
     volatile atomic_flag* __g__ = __atomic_flag_for_address__( __p__ ); \
     __atomic_flag_wait_explicit__( __g__, __x__ ); \
     __typeof__((__a__)->__f__) __r__ = *__p__; \
     *__p__ __o__ __v__; \
     atomic_flag_clear_explicit( __g__, __x__ ); \
     __r__; })
00081 
/* Atomic compare-and-swap under the per-address lock.  __e__ is a
   POINTER to the expected value; __m__ is the desired value.  If the
   current field equals *__q__, the desired value is stored and the
   expression yields true; otherwise the observed value is written
   back through __q__ and the expression yields false.  Because the
   comparison happens under the lock there are no spurious failures
   (strong CAS semantics).  */
#define _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ) \
    ({ volatile __typeof__((__a__)->__f__)* __p__ = &((__a__)->__f__); \
     __typeof__(__e__) __q__ = (__e__); \
     __typeof__(__m__) __v__ = (__m__); \
     bool __r__; \
     volatile atomic_flag* __g__ = __atomic_flag_for_address__( __p__ ); \
     __atomic_flag_wait_explicit__( __g__, __x__ ); \
     __typeof__((__a__)->__f__) __t__ = *__p__; \
     if ( __t__ == *__q__ ) { *__p__ = __v__; __r__ = true; } \
     else { *__q__ = __t__; __r__ = false; } \
     atomic_flag_clear_explicit( __g__, __x__ ); \
     __r__; })
00094 
/* Issue a fence on the guard flag associated with the object's
   address; delegates the actual ordering to atomic_flag_fence.  */
#define _ATOMIC_FENCE_( __a__, __x__ ) \
    ({ volatile __typeof__((__a__)->__f__)* __p__ = &((__a__)->__f__); \
     volatile atomic_flag* __g__ = __atomic_flag_for_address__( __p__ ); \
     atomic_flag_fence( __g__, __x__ ); \
     })
00100 
/* 0 => never lock-free: in this emulation every atomic operation goes
   through the per-address atomic_flag lock.  */
#define ATOMIC_INTEGRAL_LOCK_FREE 0
#define ATOMIC_ADDRESS_LOCK_FREE 0
00103 
00104 
00105 
/* Generic (type-agnostic) fetch-and-modify entry points, implemented
   by expanding _ATOMIC_MODIFY_ with the matching compound-assignment
   operator.  Each yields the value the object held before the update;
   the non-_explicit forms apply memory_order_seq_cst.  */

#define atomic_fetch_add_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, +=, __m__, __x__ )

#define atomic_fetch_add( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, +=, __m__, memory_order_seq_cst )


#define atomic_fetch_sub_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, -=, __m__, __x__ )

#define atomic_fetch_sub( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, -=, __m__, memory_order_seq_cst )


#define atomic_fetch_and_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, &=, __m__, __x__ )

#define atomic_fetch_and( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, &=, __m__, memory_order_seq_cst )


#define atomic_fetch_or_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, |=, __m__, __x__ )

#define atomic_fetch_or( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, |=, __m__, memory_order_seq_cst )


#define atomic_fetch_xor_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ )

#define atomic_fetch_xor( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, ^=, __m__, memory_order_seq_cst )
00139 
00140 
00141 inline void* atomic_fetch_add_explicit
00142 ( volatile atomic_address* __a__, ptrdiff_t __m__, memory_order __x__ )
00143 { void* volatile* __p__ = &((__a__)->__f__);
00144   volatile atomic_flag* __g__ = __atomic_flag_for_address__( __p__ );
00145   __atomic_flag_wait_explicit__( __g__, __x__ );
00146   void* __r__ = *__p__;
00147   *__p__ = (void*)((char*)(*__p__) + __m__);
00148   atomic_flag_clear_explicit( __g__, __x__ );
00149   return __r__; }
00150 
00151 inline void* atomic_fetch_add
00152 ( volatile atomic_address* __a__, ptrdiff_t __m__ )
00153 { return atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
00154 
00155 
00156 inline void* atomic_fetch_sub_explicit
00157 ( volatile atomic_address* __a__, ptrdiff_t __m__, memory_order __x__ )
00158 { void* volatile* __p__ = &((__a__)->__f__);
00159   volatile atomic_flag* __g__ = __atomic_flag_for_address__( __p__ );
00160   __atomic_flag_wait_explicit__( __g__, __x__ );
00161   void* __r__ = *__p__;
00162   *__p__ = (void*)((char*)(*__p__) - __m__);
00163   atomic_flag_clear_explicit( __g__, __x__ );
00164   return __r__; }
00165 
00166 inline void* atomic_fetch_sub
00167 ( volatile atomic_address* __a__, ptrdiff_t __m__ )
00168 { return atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
00169 
00170 
00171 inline char atomic_fetch_add_explicit
00172 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
00173 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
00174 
00175 inline char atomic_fetch_add
00176 ( volatile atomic_char* __a__, char __m__ )
00177 { atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
00178 
00179 
00180 inline char atomic_fetch_sub_explicit
00181 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
00182 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
00183 
00184 inline char atomic_fetch_sub
00185 ( volatile atomic_char* __a__, char __m__ )
00186 { atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
00187 
00188 
00189 inline char atomic_fetch_and_explicit
00190 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
00191 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
00192 
00193 inline char atomic_fetch_and
00194 ( volatile atomic_char* __a__, char __m__ )
00195 { atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
00196 
00197 
00198 inline char atomic_fetch_or_explicit
00199 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
00200 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
00201 
00202 inline char atomic_fetch_or
00203 ( volatile atomic_char* __a__, char __m__ )
00204 { atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
00205 
00206 
00207 inline char atomic_fetch_xor_explicit
00208 ( volatile atomic_char* __a__, char __m__, memory_order __x__ )
00209 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
00210 
00211 inline char atomic_fetch_xor
00212 ( volatile atomic_char* __a__, char __m__ )
00213 { atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
00214 
00215 
00216 inline signed char atomic_fetch_add_explicit
00217 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
00218 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
00219 
00220 inline signed char atomic_fetch_add
00221 ( volatile atomic_schar* __a__, signed char __m__ )
00222 { atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
00223 
00224 
00225 inline signed char atomic_fetch_sub_explicit
00226 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
00227 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
00228 
00229 inline signed char atomic_fetch_sub
00230 ( volatile atomic_schar* __a__, signed char __m__ )
00231 { atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
00232 
00233 
00234 inline signed char atomic_fetch_and_explicit
00235 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
00236 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
00237 
00238 inline signed char atomic_fetch_and
00239 ( volatile atomic_schar* __a__, signed char __m__ )
00240 { atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
00241 
00242 
00243 inline signed char atomic_fetch_or_explicit
00244 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
00245 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
00246 
00247 inline signed char atomic_fetch_or
00248 ( volatile atomic_schar* __a__, signed char __m__ )
00249 { atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
00250 
00251 
00252 inline signed char atomic_fetch_xor_explicit
00253 ( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
00254 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
00255 
00256 inline signed char atomic_fetch_xor
00257 ( volatile atomic_schar* __a__, signed char __m__ )
00258 { atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
00259 
00260 
00261 inline unsigned char atomic_fetch_add_explicit
00262 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
00263 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
00264 
00265 inline unsigned char atomic_fetch_add
00266 ( volatile atomic_uchar* __a__, unsigned char __m__ )
00267 { atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
00268 
00269 
00270 inline unsigned char atomic_fetch_sub_explicit
00271 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
00272 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
00273 
00274 inline unsigned char atomic_fetch_sub
00275 ( volatile atomic_uchar* __a__, unsigned char __m__ )
00276 { atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
00277 
00278 
00279 inline unsigned char atomic_fetch_and_explicit
00280 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
00281 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
00282 
00283 inline unsigned char atomic_fetch_and
00284 ( volatile atomic_uchar* __a__, unsigned char __m__ )
00285 { atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
00286 
00287 
00288 inline unsigned char atomic_fetch_or_explicit
00289 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
00290 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
00291 
00292 inline unsigned char atomic_fetch_or
00293 ( volatile atomic_uchar* __a__, unsigned char __m__ )
00294 { atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
00295 
00296 
00297 inline unsigned char atomic_fetch_xor_explicit
00298 ( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
00299 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
00300 
00301 inline unsigned char atomic_fetch_xor
00302 ( volatile atomic_uchar* __a__, unsigned char __m__ )
00303 { atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
00304 
00305 
00306 inline short atomic_fetch_add_explicit
00307 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
00308 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
00309 
00310 inline short atomic_fetch_add
00311 ( volatile atomic_short* __a__, short __m__ )
00312 { atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
00313 
00314 
00315 inline short atomic_fetch_sub_explicit
00316 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
00317 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
00318 
00319 inline short atomic_fetch_sub
00320 ( volatile atomic_short* __a__, short __m__ )
00321 { atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
00322 
00323 
00324 inline short atomic_fetch_and_explicit
00325 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
00326 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
00327 
00328 inline short atomic_fetch_and
00329 ( volatile atomic_short* __a__, short __m__ )
00330 { atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
00331 
00332 
00333 inline short atomic_fetch_or_explicit
00334 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
00335 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
00336 
00337 inline short atomic_fetch_or
00338 ( volatile atomic_short* __a__, short __m__ )
00339 { atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
00340 
00341 
00342 inline short atomic_fetch_xor_explicit
00343 ( volatile atomic_short* __a__, short __m__, memory_order __x__ )
00344 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
00345 
00346 inline short atomic_fetch_xor
00347 ( volatile atomic_short* __a__, short __m__ )
00348 { atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
00349 
00350 
00351 inline unsigned short atomic_fetch_add_explicit
00352 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
00353 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
00354 
00355 inline unsigned short atomic_fetch_add
00356 ( volatile atomic_ushort* __a__, unsigned short __m__ )
00357 { atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
00358 
00359 
00360 inline unsigned short atomic_fetch_sub_explicit
00361 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
00362 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
00363 
00364 inline unsigned short atomic_fetch_sub
00365 ( volatile atomic_ushort* __a__, unsigned short __m__ )
00366 { atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
00367 
00368 
00369 inline unsigned short atomic_fetch_and_explicit
00370 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
00371 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
00372 
00373 inline unsigned short atomic_fetch_and
00374 ( volatile atomic_ushort* __a__, unsigned short __m__ )
00375 { atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
00376 
00377 
00378 inline unsigned short atomic_fetch_or_explicit
00379 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
00380 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
00381 
00382 inline unsigned short atomic_fetch_or
00383 ( volatile atomic_ushort* __a__, unsigned short __m__ )
00384 { atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
00385 
00386 
00387 inline unsigned short atomic_fetch_xor_explicit
00388 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
00389 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
00390 
00391 inline unsigned short atomic_fetch_xor
00392 ( volatile atomic_ushort* __a__, unsigned short __m__ )
00393 { atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
00394 
00395 
00396 inline int atomic_fetch_add_explicit
00397 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
00398 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
00399 
00400 inline int atomic_fetch_add
00401 ( volatile atomic_int* __a__, int __m__ )
00402 { atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
00403 
00404 
00405 inline int atomic_fetch_sub_explicit
00406 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
00407 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
00408 
00409 inline int atomic_fetch_sub
00410 ( volatile atomic_int* __a__, int __m__ )
00411 { atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
00412 
00413 
00414 inline int atomic_fetch_and_explicit
00415 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
00416 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
00417 
00418 inline int atomic_fetch_and
00419 ( volatile atomic_int* __a__, int __m__ )
00420 { atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
00421 
00422 
00423 inline int atomic_fetch_or_explicit
00424 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
00425 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
00426 
00427 inline int atomic_fetch_or
00428 ( volatile atomic_int* __a__, int __m__ )
00429 { atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
00430 
00431 
00432 inline int atomic_fetch_xor_explicit
00433 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
00434 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
00435 
00436 inline int atomic_fetch_xor
00437 ( volatile atomic_int* __a__, int __m__ )
00438 { atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
00439 
00440 
00441 inline unsigned int atomic_fetch_add_explicit
00442 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
00443 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
00444 
00445 inline unsigned int atomic_fetch_add
00446 ( volatile atomic_uint* __a__, unsigned int __m__ )
00447 { atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
00448 
00449 
00450 inline unsigned int atomic_fetch_sub_explicit
00451 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
00452 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
00453 
00454 inline unsigned int atomic_fetch_sub
00455 ( volatile atomic_uint* __a__, unsigned int __m__ )
00456 { atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
00457 
00458 
00459 inline unsigned int atomic_fetch_and_explicit
00460 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
00461 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
00462 
00463 inline unsigned int atomic_fetch_and
00464 ( volatile atomic_uint* __a__, unsigned int __m__ )
00465 { atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
00466 
00467 
00468 inline unsigned int atomic_fetch_or_explicit
00469 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
00470 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
00471 
00472 inline unsigned int atomic_fetch_or
00473 ( volatile atomic_uint* __a__, unsigned int __m__ )
00474 { atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
00475 
00476 
00477 inline unsigned int atomic_fetch_xor_explicit
00478 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
00479 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
00480 
00481 inline unsigned int atomic_fetch_xor
00482 ( volatile atomic_uint* __a__, unsigned int __m__ )
00483 { atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
00484 
00485 
00486 inline long atomic_fetch_add_explicit
00487 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
00488 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
00489 
00490 inline long atomic_fetch_add
00491 ( volatile atomic_long* __a__, long __m__ )
00492 { atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
00493 
00494 
00495 inline long atomic_fetch_sub_explicit
00496 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
00497 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
00498 
00499 inline long atomic_fetch_sub
00500 ( volatile atomic_long* __a__, long __m__ )
00501 { atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
00502 
00503 
00504 inline long atomic_fetch_and_explicit
00505 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
00506 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
00507 
00508 inline long atomic_fetch_and
00509 ( volatile atomic_long* __a__, long __m__ )
00510 { atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
00511 
00512 
00513 inline long atomic_fetch_or_explicit
00514 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
00515 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
00516 
00517 inline long atomic_fetch_or
00518 ( volatile atomic_long* __a__, long __m__ )
00519 { atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
00520 
00521 
00522 inline long atomic_fetch_xor_explicit
00523 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
00524 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
00525 
00526 inline long atomic_fetch_xor
00527 ( volatile atomic_long* __a__, long __m__ )
00528 { atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
00529 
00530 
00531 inline unsigned long atomic_fetch_add_explicit
00532 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
00533 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
00534 
00535 inline unsigned long atomic_fetch_add
00536 ( volatile atomic_ulong* __a__, unsigned long __m__ )
00537 { atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
00538 
00539 
00540 inline unsigned long atomic_fetch_sub_explicit
00541 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
00542 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
00543 
00544 inline unsigned long atomic_fetch_sub
00545 ( volatile atomic_ulong* __a__, unsigned long __m__ )
00546 { atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
00547 
00548 
00549 inline unsigned long atomic_fetch_and_explicit
00550 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
00551 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
00552 
00553 inline unsigned long atomic_fetch_and
00554 ( volatile atomic_ulong* __a__, unsigned long __m__ )
00555 { atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
00556 
00557 
00558 inline unsigned long atomic_fetch_or_explicit
00559 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
00560 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
00561 
00562 inline unsigned long atomic_fetch_or
00563 ( volatile atomic_ulong* __a__, unsigned long __m__ )
00564 { atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
00565 
00566 
00567 inline unsigned long atomic_fetch_xor_explicit
00568 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
00569 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
00570 
00571 inline unsigned long atomic_fetch_xor
00572 ( volatile atomic_ulong* __a__, unsigned long __m__ )
00573 { atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
00574 
00575 
00576 inline long long atomic_fetch_add_explicit
00577 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
00578 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
00579 
00580 inline long long atomic_fetch_add
00581 ( volatile atomic_llong* __a__, long long __m__ )
00582 { atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
00583 
00584 
00585 inline long long atomic_fetch_sub_explicit
00586 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
00587 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
00588 
00589 inline long long atomic_fetch_sub
00590 ( volatile atomic_llong* __a__, long long __m__ )
00591 { atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
00592 
00593 
00594 inline long long atomic_fetch_and_explicit
00595 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
00596 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
00597 
00598 inline long long atomic_fetch_and
00599 ( volatile atomic_llong* __a__, long long __m__ )
00600 { atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
00601 
00602 
00603 inline long long atomic_fetch_or_explicit
00604 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
00605 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
00606 
00607 inline long long atomic_fetch_or
00608 ( volatile atomic_llong* __a__, long long __m__ )
00609 { atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
00610 
00611 
00612 inline long long atomic_fetch_xor_explicit
00613 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
00614 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
00615 
00616 inline long long atomic_fetch_xor
00617 ( volatile atomic_llong* __a__, long long __m__ )
00618 { atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
00619 
00620 
00621 inline unsigned long long atomic_fetch_add_explicit
00622 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
00623 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
00624 
00625 inline unsigned long long atomic_fetch_add
00626 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
00627 { atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
00628 
00629 
00630 inline unsigned long long atomic_fetch_sub_explicit
00631 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
00632 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
00633 
00634 inline unsigned long long atomic_fetch_sub
00635 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
00636 { atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
00637 
00638 
00639 inline unsigned long long atomic_fetch_and_explicit
00640 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
00641 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
00642 
00643 inline unsigned long long atomic_fetch_and
00644 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
00645 { atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
00646 
00647 
00648 inline unsigned long long atomic_fetch_or_explicit
00649 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
00650 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
00651 
00652 inline unsigned long long atomic_fetch_or
00653 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
00654 { atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
00655 
00656 
00657 inline unsigned long long atomic_fetch_xor_explicit
00658 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
00659 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
00660 
00661 inline unsigned long long atomic_fetch_xor
00662 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
00663 { atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }
00664 
00665 
00666 inline wchar_t atomic_fetch_add_explicit
00667 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
00668 { return _ATOMIC_MODIFY_( __a__, +=, __m__, __x__ ); }
00669 
00670 inline wchar_t atomic_fetch_add
00671 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
00672 { atomic_fetch_add_explicit( __a__, __m__, memory_order_seq_cst ); }
00673 
00674 
00675 inline wchar_t atomic_fetch_sub_explicit
00676 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
00677 { return _ATOMIC_MODIFY_( __a__, -=, __m__, __x__ ); }
00678 
00679 inline wchar_t atomic_fetch_sub
00680 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
00681 { atomic_fetch_sub_explicit( __a__, __m__, memory_order_seq_cst ); }
00682 
00683 
00684 inline wchar_t atomic_fetch_and_explicit
00685 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
00686 { return _ATOMIC_MODIFY_( __a__, &=, __m__, __x__ ); }
00687 
00688 inline wchar_t atomic_fetch_and
00689 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
00690 { atomic_fetch_and_explicit( __a__, __m__, memory_order_seq_cst ); }
00691 
00692 
00693 inline wchar_t atomic_fetch_or_explicit
00694 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
00695 { return _ATOMIC_MODIFY_( __a__, |=, __m__, __x__ ); }
00696 
00697 inline wchar_t atomic_fetch_or
00698 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
00699 { atomic_fetch_or_explicit( __a__, __m__, memory_order_seq_cst ); }
00700 
00701 
00702 inline wchar_t atomic_fetch_xor_explicit
00703 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
00704 { return _ATOMIC_MODIFY_( __a__, ^=, __m__, __x__ ); }
00705 
00706 inline wchar_t atomic_fetch_xor
00707 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
00708 { atomic_fetch_xor_explicit( __a__, __m__, memory_order_seq_cst ); }