00001
00002
00003
00004
00005
00006
00007
00008
00009
00010
00011
00012
00013
00014
00015
00016
00017 #ifndef IMPATOMIC_H
00018 #define IMPATOMIC_H
00019
00020 #include <amino/config.h>
00021
00022 #ifdef __cplusplus
00023 #include <cstddef>
00024 namespace std {
00025 #else
00026 #include <stddef.h>
00027 #include <stdbool.h>
00028 #endif
00029
00030 #define CPP0X( feature )
00031
00032 #if defined(X86)
00033 #define ALIGN_DEF __attribute__((aligned(64)))
00034 #else
00035 #define ALIGN_DEF
00036 #endif
00037
/* Memory-ordering constraints for atomic operations, modeled on the
 * C++0x memory model.  Every atomic member function in this file defaults
 * to memory_order_seq_cst (sequential consistency), the strongest order.
 * NOTE(review): memory_order_consume is absent — this emulation predates
 * (or deliberately omits) the consume ordering of the final standard. */
typedef enum memory_order {
    memory_order_relaxed, memory_order_acquire, memory_order_release,
    memory_order_acq_rel, memory_order_seq_cst
} memory_order;
00042
00043
/* Primitive atomic boolean flag (the minimal lock-free type).
 * test_and_set() atomically sets the flag (and, per its name, presumably
 * reports the prior state — confirm against the atomic_base definitions,
 * which are not visible in this header); clear() resets it; fence()
 * issues a stand-alone ordering fence on this object.
 * Member definitions live in atomic_base/x86.h or atomic_base/generic.h,
 * included further down in this file.
 * CPP0X() expands to nothing (see its empty definition above), so the
 * C++0x-only defaulted/deleted members are compiled out on C++03. */
typedef struct atomic_flag
{
#ifdef __cplusplus
    bool test_and_set( memory_order = memory_order_seq_cst ) volatile;
    void clear( memory_order = memory_order_seq_cst ) volatile;
    void fence( memory_order ) const volatile;

    CPP0X( atomic_flag() = default; )
    CPP0X( atomic_flag( const atomic_flag& ) = delete; )
    /* Copy assignment is declared (deleted only under C++0x). */
    atomic_flag& operator =( const atomic_flag& ) CPP0X(=delete);

CPP0X(private:)
#endif
    /* The stored flag; volatile so every access is a real memory access.
     * Placed after the #endif so the same field exists for plain C. */
    volatile bool __f__;
} atomic_flag;
00059
00060 #define ATOMIC_FLAG_INIT { false }
00061
00062 #if defined(GCC) && defined(X86)
00063 #include "atomic_base/x86.h"
00064 #else
00065 #include "atomic_base/generic.h"
00066 #endif
00067
/* Atomic boolean.  Declaration-only: definitions are expected to come from
 * the atomic_base/*.h header included above (not visible in this chunk).
 * All operations default to memory_order_seq_cst.  The two compare_swap
 * overloads take either split (success, failure) orders or one order for
 * both.  CPP0X() expands to nothing, so C++0x-only members vanish on
 * C++03 compilers. */
typedef struct atomic_bool
{
#ifdef __cplusplus
    bool is_lock_free() const volatile;
    void store( bool, memory_order = memory_order_seq_cst ) volatile;
    /* NOTE(review): load() is non-const here, while atomic_address::load is
     * const volatile and std::atomic declares load const — possibly
     * deliberate in this emulation; confirm against the definitions. */
    bool load( memory_order = memory_order_seq_cst ) volatile;
    bool swap( bool, memory_order = memory_order_seq_cst ) volatile;
    bool compare_swap ( bool&, bool, memory_order, memory_order ) volatile;
    bool compare_swap ( bool&, bool,
                        memory_order = memory_order_seq_cst) volatile;
    void fence( memory_order ) const volatile;

    /* NOTE(review): siblings use "= default" here; "= delete" may be a
     * typo, but it is inert either way because CPP0X() expands empty. */
    CPP0X( atomic_bool() = delete; )
    CPP0X( constexpr explicit atomic_bool( bool __v__ ) : __f__( __v__ ) { } )
    CPP0X( atomic_bool( const atomic_bool& ) = delete; )
    atomic_bool& operator =( const atomic_bool& ) CPP0X(=delete);

    /* Assignment from a plain bool maps to a seq_cst store; returns the
     * stored value to mimic built-in assignment. */
    bool operator =( bool __v__ ) volatile
    { store( __v__ ); return __v__; }

    /* Free-function (C-compatible) API; friends so they may touch __f__. */
    friend void atomic_store_explicit( volatile atomic_bool*, bool,
                                       memory_order );
    friend bool atomic_load_explicit( volatile atomic_bool*, memory_order );
    friend bool atomic_swap_explicit( volatile atomic_bool*, bool,
                                      memory_order );
    friend bool atomic_compare_swap_explicit( volatile atomic_bool*, bool*, bool,
                                              memory_order, memory_order );
    friend void atomic_fence( const volatile atomic_bool*, memory_order );

CPP0X(private:)
#endif
    /* Stored value; after the #endif so plain C sees the same layout. */
    volatile bool __f__;
} atomic_bool;
00101
00102
/* Atomic void* (address).  Declaration-only; definitions come from the
 * atomic_base/*.h header included above.  Supports pointer arithmetic in
 * raw bytes via fetch_add/fetch_sub(ptrdiff_t).  ALIGN_DEF pads/aligns
 * the object to 64 bytes on X86 (see the macro above) — presumably to
 * avoid false sharing; confirm. */
typedef struct atomic_address
{
#ifdef __cplusplus
    bool is_lock_free() const volatile;
    void store( void*, memory_order = memory_order_seq_cst ) volatile;
    void* load( memory_order = memory_order_seq_cst ) const volatile;
    void* swap( void*, memory_order = memory_order_seq_cst ) volatile;
    bool compare_swap( void*&, void*, memory_order, memory_order ) volatile;
    bool compare_swap( void*&, void*,
                       memory_order = memory_order_seq_cst ) volatile;
    void fence( memory_order ) const volatile;
    void* fetch_add( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
    void* fetch_sub( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;

    CPP0X( atomic_address() = default; )
    CPP0X( constexpr explicit atomic_address( void* __v__ ) : __f__( __v__) { } )
    CPP0X( atomic_address( const atomic_address& ) = delete; )
    atomic_address& operator =( const atomic_address & ) CPP0X(=delete);

    /* Assignment from a raw pointer maps to a seq_cst store. */
    void* operator =( void* __v__ ) volatile
    { store( __v__ ); return __v__; }

    /* NOTE(review): unlike the integral atomics, whose op= return
     * fetch_op(v) OP v (the updated value), these return the fetch_add/
     * fetch_sub result directly — i.e. presumably the pointer value held
     * BEFORE the update (void* arithmetic being ill-formed).  Confirm
     * callers expect that. */
    void* operator +=( ptrdiff_t __v__ ) volatile
    { return fetch_add( __v__ ); }

    void* operator -=( ptrdiff_t __v__ ) volatile
    { return fetch_sub( __v__ ); }

    friend void atomic_store_explicit( volatile atomic_address*, void*,
                                       memory_order );
    /* Top-level const on the pointer parameter is redundant (does not
     * affect the signature); kept as declared. */
    friend void* atomic_load_explicit( const volatile atomic_address* const, memory_order );
    friend void* atomic_swap_explicit( volatile atomic_address*, void*,
                                       memory_order );
    friend bool atomic_compare_swap_explicit( volatile atomic_address*,
                                              void**, void*, memory_order, memory_order );
    friend void atomic_fence( const volatile atomic_address*, memory_order );
    friend void* atomic_fetch_add_explicit( volatile atomic_address*, ptrdiff_t,
                                            memory_order );
    friend void* atomic_fetch_sub_explicit( volatile atomic_address*, ptrdiff_t,
                                            memory_order );
    /* NOTE(review): no CPP0X(private:) here, unlike the sibling structs —
     * inert (macro expands empty) but inconsistent. */
#endif
    /* The pointer itself is volatile (not the pointee). */
    void * volatile __f__;
} ALIGN_DEF atomic_address;
00146
00147
/* Atomic char.  Declaration-only; definitions are macro-generated in the
 * atomic_base/*.h header included above.  All operations default to
 * memory_order_seq_cst; compare_swap comes in split-order and single-order
 * overloads.  The operator overloads are built on the fetch_* primitives:
 * the `fetch_op(v) OP v` pattern used by the compound-assignment and
 * pre-increment forms yields the updated value, implying the fetch_*
 * functions return the value held before the operation.  CPP0X() expands
 * to nothing, so C++0x-only members are compiled out on C++03. */
typedef struct atomic_char
{
#ifdef __cplusplus
    bool is_lock_free() const volatile;
    void store( char,
                memory_order = memory_order_seq_cst ) volatile;
    char load( memory_order = memory_order_seq_cst ) volatile;
    char swap( char,
               memory_order = memory_order_seq_cst ) volatile;
    bool compare_swap( char&, char,
                       memory_order, memory_order ) volatile;
    bool compare_swap( char&, char,
                       memory_order = memory_order_seq_cst ) volatile;
    void fence( memory_order ) const volatile;
    char fetch_add( char,
                    memory_order = memory_order_seq_cst ) volatile;
    char fetch_sub( char,
                    memory_order = memory_order_seq_cst ) volatile;
    char fetch_and( char,
                    memory_order = memory_order_seq_cst ) volatile;
    char fetch_or( char,
                   memory_order = memory_order_seq_cst ) volatile;
    char fetch_xor( char,
                    memory_order = memory_order_seq_cst ) volatile;

    CPP0X( atomic_char() = default; )
    CPP0X( constexpr atomic_char( char __v__ ) : __f__( __v__) { } )
    CPP0X( atomic_char( const atomic_char& ) = delete; )
    atomic_char& operator =( const atomic_char& ) CPP0X(=delete);

    /* Assignment from a plain char maps to a seq_cst store. */
    char operator =( char __v__ ) volatile
    { store( __v__ ); return __v__; }

    /* Post-inc/dec return the fetch_* result (prior value)... */
    char operator ++( int ) volatile
    { return fetch_add( 1 ); }

    char operator --( int ) volatile
    { return fetch_sub( 1 ); }

    /* ...while pre-inc/dec re-apply the delta to yield the new value,
     * matching built-in operator semantics. */
    char operator ++() volatile
    { return fetch_add( 1 ) + 1; }

    char operator --() volatile
    { return fetch_sub( 1 ) - 1; }

    char operator +=( char __v__ ) volatile
    { return fetch_add( __v__ ) + __v__; }

    char operator -=( char __v__ ) volatile
    { return fetch_sub( __v__ ) - __v__; }

    char operator &=( char __v__ ) volatile
    { return fetch_and( __v__ ) & __v__; }

    char operator |=( char __v__ ) volatile
    { return fetch_or( __v__ ) | __v__; }

    char operator ^=( char __v__ ) volatile
    { return fetch_xor( __v__ ) ^ __v__; }

    /* Free-function (C-compatible) API; friends so they may touch __f__. */
    friend void atomic_store_explicit( volatile atomic_char*, char,
                                       memory_order );
    friend char atomic_load_explicit( volatile atomic_char*,
                                      memory_order );
    friend char atomic_swap_explicit( volatile atomic_char*,
                                      char, memory_order );
    friend bool atomic_compare_swap_explicit( volatile atomic_char*,
                                              char*, char, memory_order, memory_order );
    friend void atomic_fence( const volatile atomic_char*, memory_order );
    friend char atomic_fetch_add_explicit( volatile atomic_char*,
                                           char, memory_order );
    friend char atomic_fetch_sub_explicit( volatile atomic_char*,
                                           char, memory_order );
    friend char atomic_fetch_and_explicit( volatile atomic_char*,
                                           char, memory_order );
    friend char atomic_fetch_or_explicit( volatile atomic_char*,
                                          char, memory_order );
    friend char atomic_fetch_xor_explicit( volatile atomic_char*,
                                           char, memory_order );

CPP0X(private:)
#endif
    /* Stored value; after the #endif so plain C sees the same layout. */
    volatile char __f__;
} atomic_char;
00232
00233
/* Atomic signed char.  Structurally identical to the other integral
 * atomic_* structs in this file: seq_cst-by-default member operations,
 * split-order and single-order compare_swap overloads, and operator
 * overloads built on the fetch_* primitives (the `fetch_op(v) OP v`
 * pattern in pre-inc/dec and compound assignment yields the updated
 * value, implying fetch_* return the value held before the operation).
 * Definitions live in the atomic_base/*.h header included above.
 * CPP0X() expands to nothing, so C++0x-only members are compiled out. */
typedef struct atomic_schar
{
#ifdef __cplusplus
    bool is_lock_free() const volatile;
    void store( signed char,
                memory_order = memory_order_seq_cst ) volatile;
    signed char load( memory_order = memory_order_seq_cst ) volatile;
    signed char swap( signed char,
                      memory_order = memory_order_seq_cst ) volatile;
    bool compare_swap( signed char&, signed char,
                       memory_order, memory_order ) volatile;
    bool compare_swap( signed char&, signed char,
                       memory_order = memory_order_seq_cst ) volatile;
    void fence( memory_order ) const volatile;
    signed char fetch_add( signed char,
                           memory_order = memory_order_seq_cst ) volatile;
    signed char fetch_sub( signed char,
                           memory_order = memory_order_seq_cst ) volatile;
    signed char fetch_and( signed char,
                           memory_order = memory_order_seq_cst ) volatile;
    signed char fetch_or( signed char,
                          memory_order = memory_order_seq_cst ) volatile;
    signed char fetch_xor( signed char,
                           memory_order = memory_order_seq_cst ) volatile;

    CPP0X( atomic_schar() = default; )
    CPP0X( constexpr atomic_schar( signed char __v__ ) : __f__( __v__) { } )
    CPP0X( atomic_schar( const atomic_schar& ) = delete; )
    atomic_schar& operator =( const atomic_schar& ) CPP0X(=delete);

    /* Assignment maps to a seq_cst store. */
    signed char operator =( signed char __v__ ) volatile
    { store( __v__ ); return __v__; }

    /* Post-forms return the prior value; pre-forms re-apply the delta. */
    signed char operator ++( int ) volatile
    { return fetch_add( 1 ); }

    signed char operator --( int ) volatile
    { return fetch_sub( 1 ); }

    signed char operator ++() volatile
    { return fetch_add( 1 ) + 1; }

    signed char operator --() volatile
    { return fetch_sub( 1 ) - 1; }

    signed char operator +=( signed char __v__ ) volatile
    { return fetch_add( __v__ ) + __v__; }

    signed char operator -=( signed char __v__ ) volatile
    { return fetch_sub( __v__ ) - __v__; }

    signed char operator &=( signed char __v__ ) volatile
    { return fetch_and( __v__ ) & __v__; }

    signed char operator |=( signed char __v__ ) volatile
    { return fetch_or( __v__ ) | __v__; }

    signed char operator ^=( signed char __v__ ) volatile
    { return fetch_xor( __v__ ) ^ __v__; }

    /* Free-function (C-compatible) API. */
    friend void atomic_store_explicit( volatile atomic_schar*, signed char,
                                       memory_order );
    friend signed char atomic_load_explicit( volatile atomic_schar*,
                                             memory_order );
    friend signed char atomic_swap_explicit( volatile atomic_schar*,
                                             signed char, memory_order );
    friend bool atomic_compare_swap_explicit( volatile atomic_schar*,
                                              signed char*, signed char, memory_order, memory_order );
    friend void atomic_fence( const volatile atomic_schar*, memory_order );
    friend signed char atomic_fetch_add_explicit( volatile atomic_schar*,
                                                  signed char, memory_order );
    friend signed char atomic_fetch_sub_explicit( volatile atomic_schar*,
                                                  signed char, memory_order );
    friend signed char atomic_fetch_and_explicit( volatile atomic_schar*,
                                                  signed char, memory_order );
    friend signed char atomic_fetch_or_explicit( volatile atomic_schar*,
                                                 signed char, memory_order );
    friend signed char atomic_fetch_xor_explicit( volatile atomic_schar*,
                                                  signed char, memory_order );

CPP0X(private:)
#endif
    /* Stored value; after the #endif so plain C sees the same layout. */
    volatile signed char __f__;
} atomic_schar;
00318
00319
/* Atomic unsigned char.  Structurally identical to the other integral
 * atomic_* structs in this file: seq_cst-by-default member operations,
 * split-order and single-order compare_swap overloads, and operator
 * overloads built on the fetch_* primitives (the `fetch_op(v) OP v`
 * pattern in pre-inc/dec and compound assignment yields the updated
 * value, implying fetch_* return the value held before the operation).
 * Definitions live in the atomic_base/*.h header included above.
 * CPP0X() expands to nothing, so C++0x-only members are compiled out. */
typedef struct atomic_uchar
{
#ifdef __cplusplus
    bool is_lock_free() const volatile;
    void store( unsigned char,
                memory_order = memory_order_seq_cst ) volatile;
    unsigned char load( memory_order = memory_order_seq_cst ) volatile;
    unsigned char swap( unsigned char,
                        memory_order = memory_order_seq_cst ) volatile;
    bool compare_swap( unsigned char&, unsigned char,
                       memory_order, memory_order ) volatile;
    bool compare_swap( unsigned char&, unsigned char,
                       memory_order = memory_order_seq_cst ) volatile;
    void fence( memory_order ) const volatile;
    unsigned char fetch_add( unsigned char,
                             memory_order = memory_order_seq_cst ) volatile;
    unsigned char fetch_sub( unsigned char,
                             memory_order = memory_order_seq_cst ) volatile;
    unsigned char fetch_and( unsigned char,
                             memory_order = memory_order_seq_cst ) volatile;
    unsigned char fetch_or( unsigned char,
                            memory_order = memory_order_seq_cst ) volatile;
    unsigned char fetch_xor( unsigned char,
                             memory_order = memory_order_seq_cst ) volatile;

    CPP0X( atomic_uchar() = default; )
    CPP0X( constexpr atomic_uchar( unsigned char __v__ ) : __f__( __v__) { } )
    CPP0X( atomic_uchar( const atomic_uchar& ) = delete; )
    atomic_uchar& operator =( const atomic_uchar& ) CPP0X(=delete);

    /* Assignment maps to a seq_cst store. */
    unsigned char operator =( unsigned char __v__ ) volatile
    { store( __v__ ); return __v__; }

    /* Post-forms return the prior value; pre-forms re-apply the delta. */
    unsigned char operator ++( int ) volatile
    { return fetch_add( 1 ); }

    unsigned char operator --( int ) volatile
    { return fetch_sub( 1 ); }

    unsigned char operator ++() volatile
    { return fetch_add( 1 ) + 1; }

    unsigned char operator --() volatile
    { return fetch_sub( 1 ) - 1; }

    unsigned char operator +=( unsigned char __v__ ) volatile
    { return fetch_add( __v__ ) + __v__; }

    unsigned char operator -=( unsigned char __v__ ) volatile
    { return fetch_sub( __v__ ) - __v__; }

    unsigned char operator &=( unsigned char __v__ ) volatile
    { return fetch_and( __v__ ) & __v__; }

    unsigned char operator |=( unsigned char __v__ ) volatile
    { return fetch_or( __v__ ) | __v__; }

    unsigned char operator ^=( unsigned char __v__ ) volatile
    { return fetch_xor( __v__ ) ^ __v__; }

    /* Free-function (C-compatible) API. */
    friend void atomic_store_explicit( volatile atomic_uchar*, unsigned char,
                                       memory_order );
    friend unsigned char atomic_load_explicit( volatile atomic_uchar*,
                                               memory_order );
    friend unsigned char atomic_swap_explicit( volatile atomic_uchar*,
                                               unsigned char, memory_order );
    friend bool atomic_compare_swap_explicit( volatile atomic_uchar*,
                                              unsigned char*, unsigned char, memory_order, memory_order );
    friend void atomic_fence( const volatile atomic_uchar*, memory_order );
    friend unsigned char atomic_fetch_add_explicit( volatile atomic_uchar*,
                                                    unsigned char, memory_order );
    friend unsigned char atomic_fetch_sub_explicit( volatile atomic_uchar*,
                                                    unsigned char, memory_order );
    friend unsigned char atomic_fetch_and_explicit( volatile atomic_uchar*,
                                                    unsigned char, memory_order );
    friend unsigned char atomic_fetch_or_explicit( volatile atomic_uchar*,
                                                   unsigned char, memory_order );
    friend unsigned char atomic_fetch_xor_explicit( volatile atomic_uchar*,
                                                    unsigned char, memory_order );

CPP0X(private:)
#endif
    /* Stored value; after the #endif so plain C sees the same layout. */
    volatile unsigned char __f__;
} atomic_uchar;
00404
00405
/* Atomic short.  Structurally identical to the other integral atomic_*
 * structs in this file: seq_cst-by-default member operations, split-order
 * and single-order compare_swap overloads, and operator overloads built
 * on the fetch_* primitives (the `fetch_op(v) OP v` pattern in
 * pre-inc/dec and compound assignment yields the updated value, implying
 * fetch_* return the value held before the operation).  Definitions live
 * in the atomic_base/*.h header included above.  CPP0X() expands to
 * nothing, so C++0x-only members are compiled out. */
typedef struct atomic_short
{
#ifdef __cplusplus
    bool is_lock_free() const volatile;
    void store( short,
                memory_order = memory_order_seq_cst ) volatile;
    short load( memory_order = memory_order_seq_cst ) volatile;
    short swap( short,
                memory_order = memory_order_seq_cst ) volatile;
    bool compare_swap( short&, short,
                       memory_order, memory_order ) volatile;
    bool compare_swap( short&, short,
                       memory_order = memory_order_seq_cst ) volatile;
    void fence( memory_order ) const volatile;
    short fetch_add( short,
                     memory_order = memory_order_seq_cst ) volatile;
    short fetch_sub( short,
                     memory_order = memory_order_seq_cst ) volatile;
    short fetch_and( short,
                     memory_order = memory_order_seq_cst ) volatile;
    short fetch_or( short,
                    memory_order = memory_order_seq_cst ) volatile;
    short fetch_xor( short,
                     memory_order = memory_order_seq_cst ) volatile;

    CPP0X( atomic_short() = default; )
    CPP0X( constexpr atomic_short( short __v__ ) : __f__( __v__) { } )
    CPP0X( atomic_short( const atomic_short& ) = delete; )
    atomic_short& operator =( const atomic_short& ) CPP0X(=delete);

    /* Assignment maps to a seq_cst store. */
    short operator =( short __v__ ) volatile
    { store( __v__ ); return __v__; }

    /* Post-forms return the prior value; pre-forms re-apply the delta. */
    short operator ++( int ) volatile
    { return fetch_add( 1 ); }

    short operator --( int ) volatile
    { return fetch_sub( 1 ); }

    short operator ++() volatile
    { return fetch_add( 1 ) + 1; }

    short operator --() volatile
    { return fetch_sub( 1 ) - 1; }

    short operator +=( short __v__ ) volatile
    { return fetch_add( __v__ ) + __v__; }

    short operator -=( short __v__ ) volatile
    { return fetch_sub( __v__ ) - __v__; }

    short operator &=( short __v__ ) volatile
    { return fetch_and( __v__ ) & __v__; }

    short operator |=( short __v__ ) volatile
    { return fetch_or( __v__ ) | __v__; }

    short operator ^=( short __v__ ) volatile
    { return fetch_xor( __v__ ) ^ __v__; }

    /* Free-function (C-compatible) API. */
    friend void atomic_store_explicit( volatile atomic_short*, short,
                                       memory_order );
    friend short atomic_load_explicit( volatile atomic_short*,
                                       memory_order );
    friend short atomic_swap_explicit( volatile atomic_short*,
                                       short, memory_order );
    friend bool atomic_compare_swap_explicit( volatile atomic_short*,
                                              short*, short, memory_order, memory_order );
    friend void atomic_fence( const volatile atomic_short*, memory_order );
    friend short atomic_fetch_add_explicit( volatile atomic_short*,
                                            short, memory_order );
    friend short atomic_fetch_sub_explicit( volatile atomic_short*,
                                            short, memory_order );
    friend short atomic_fetch_and_explicit( volatile atomic_short*,
                                            short, memory_order );
    friend short atomic_fetch_or_explicit( volatile atomic_short*,
                                           short, memory_order );
    friend short atomic_fetch_xor_explicit( volatile atomic_short*,
                                            short, memory_order );

CPP0X(private:)
#endif
    /* Stored value; after the #endif so plain C sees the same layout. */
    volatile short __f__;
} atomic_short;
00490
00491
/* Atomic unsigned short.  Structurally identical to the other integral
 * atomic_* structs in this file: seq_cst-by-default member operations,
 * split-order and single-order compare_swap overloads, and operator
 * overloads built on the fetch_* primitives (the `fetch_op(v) OP v`
 * pattern in pre-inc/dec and compound assignment yields the updated
 * value, implying fetch_* return the value held before the operation).
 * Definitions live in the atomic_base/*.h header included above.
 * CPP0X() expands to nothing, so C++0x-only members are compiled out. */
typedef struct atomic_ushort
{
#ifdef __cplusplus
    bool is_lock_free() const volatile;
    void store( unsigned short,
                memory_order = memory_order_seq_cst ) volatile;
    unsigned short load( memory_order = memory_order_seq_cst ) volatile;
    unsigned short swap( unsigned short,
                         memory_order = memory_order_seq_cst ) volatile;
    bool compare_swap( unsigned short&, unsigned short,
                       memory_order, memory_order ) volatile;
    bool compare_swap( unsigned short&, unsigned short,
                       memory_order = memory_order_seq_cst ) volatile;
    void fence( memory_order ) const volatile;
    unsigned short fetch_add( unsigned short,
                              memory_order = memory_order_seq_cst ) volatile;
    unsigned short fetch_sub( unsigned short,
                              memory_order = memory_order_seq_cst ) volatile;
    unsigned short fetch_and( unsigned short,
                              memory_order = memory_order_seq_cst ) volatile;
    unsigned short fetch_or( unsigned short,
                             memory_order = memory_order_seq_cst ) volatile;
    unsigned short fetch_xor( unsigned short,
                              memory_order = memory_order_seq_cst ) volatile;

    CPP0X( atomic_ushort() = default; )
    CPP0X( constexpr atomic_ushort( unsigned short __v__ ) : __f__( __v__) { } )
    CPP0X( atomic_ushort( const atomic_ushort& ) = delete; )
    atomic_ushort& operator =( const atomic_ushort& ) CPP0X(=delete);

    /* Assignment maps to a seq_cst store. */
    unsigned short operator =( unsigned short __v__ ) volatile
    { store( __v__ ); return __v__; }

    /* Post-forms return the prior value; pre-forms re-apply the delta. */
    unsigned short operator ++( int ) volatile
    { return fetch_add( 1 ); }

    unsigned short operator --( int ) volatile
    { return fetch_sub( 1 ); }

    unsigned short operator ++() volatile
    { return fetch_add( 1 ) + 1; }

    unsigned short operator --() volatile
    { return fetch_sub( 1 ) - 1; }

    unsigned short operator +=( unsigned short __v__ ) volatile
    { return fetch_add( __v__ ) + __v__; }

    unsigned short operator -=( unsigned short __v__ ) volatile
    { return fetch_sub( __v__ ) - __v__; }

    unsigned short operator &=( unsigned short __v__ ) volatile
    { return fetch_and( __v__ ) & __v__; }

    unsigned short operator |=( unsigned short __v__ ) volatile
    { return fetch_or( __v__ ) | __v__; }

    unsigned short operator ^=( unsigned short __v__ ) volatile
    { return fetch_xor( __v__ ) ^ __v__; }

    /* Free-function (C-compatible) API. */
    friend void atomic_store_explicit( volatile atomic_ushort*, unsigned short,
                                       memory_order );
    friend unsigned short atomic_load_explicit( volatile atomic_ushort*,
                                                memory_order );
    friend unsigned short atomic_swap_explicit( volatile atomic_ushort*,
                                                unsigned short, memory_order );
    friend bool atomic_compare_swap_explicit( volatile atomic_ushort*,
                                              unsigned short*, unsigned short, memory_order, memory_order );
    friend void atomic_fence( const volatile atomic_ushort*, memory_order );
    friend unsigned short atomic_fetch_add_explicit( volatile atomic_ushort*,
                                                     unsigned short, memory_order );
    friend unsigned short atomic_fetch_sub_explicit( volatile atomic_ushort*,
                                                     unsigned short, memory_order );
    friend unsigned short atomic_fetch_and_explicit( volatile atomic_ushort*,
                                                     unsigned short, memory_order );
    friend unsigned short atomic_fetch_or_explicit( volatile atomic_ushort*,
                                                    unsigned short, memory_order );
    friend unsigned short atomic_fetch_xor_explicit( volatile atomic_ushort*,
                                                     unsigned short, memory_order );

CPP0X(private:)
#endif
    /* Stored value; after the #endif so plain C sees the same layout. */
    volatile unsigned short __f__;
} atomic_ushort;
00576
00577
/* Atomic int.  Structurally identical to the other integral atomic_*
 * structs in this file: seq_cst-by-default member operations, split-order
 * and single-order compare_swap overloads, and operator overloads built
 * on the fetch_* primitives (the `fetch_op(v) OP v` pattern in
 * pre-inc/dec and compound assignment yields the updated value, implying
 * fetch_* return the value held before the operation).  Definitions live
 * in the atomic_base/*.h header included above.  CPP0X() expands to
 * nothing.  Unlike the sub-int types, this struct carries ALIGN_DEF:
 * 64-byte alignment on X86, presumably to keep each object on its own
 * cache line (false-sharing avoidance) — confirm. */
typedef struct atomic_int
{
#ifdef __cplusplus
    bool is_lock_free() const volatile;
    void store( int,
                memory_order = memory_order_seq_cst ) volatile;
    int load( memory_order = memory_order_seq_cst ) volatile;
    int swap( int,
              memory_order = memory_order_seq_cst ) volatile;
    bool compare_swap( int&, int,
                       memory_order, memory_order ) volatile;
    bool compare_swap( int&, int,
                       memory_order = memory_order_seq_cst ) volatile;
    void fence( memory_order ) const volatile;
    int fetch_add( int,
                   memory_order = memory_order_seq_cst ) volatile;
    int fetch_sub( int,
                   memory_order = memory_order_seq_cst ) volatile;
    int fetch_and( int,
                   memory_order = memory_order_seq_cst ) volatile;
    int fetch_or( int,
                  memory_order = memory_order_seq_cst ) volatile;
    int fetch_xor( int,
                   memory_order = memory_order_seq_cst ) volatile;

    CPP0X( atomic_int() = default; )
    CPP0X( constexpr atomic_int( int __v__ ) : __f__( __v__) { } )
    CPP0X( atomic_int( const atomic_int& ) = delete; )
    atomic_int& operator =( const atomic_int& ) CPP0X(=delete);

    /* Assignment maps to a seq_cst store. */
    int operator =( int __v__ ) volatile
    { store( __v__ ); return __v__; }

    /* Post-forms return the prior value; pre-forms re-apply the delta. */
    int operator ++( int ) volatile
    { return fetch_add( 1 ); }

    int operator --( int ) volatile
    { return fetch_sub( 1 ); }

    int operator ++() volatile
    { return fetch_add( 1 ) + 1; }

    int operator --() volatile
    { return fetch_sub( 1 ) - 1; }

    int operator +=( int __v__ ) volatile
    { return fetch_add( __v__ ) + __v__; }

    int operator -=( int __v__ ) volatile
    { return fetch_sub( __v__ ) - __v__; }

    int operator &=( int __v__ ) volatile
    { return fetch_and( __v__ ) & __v__; }

    int operator |=( int __v__ ) volatile
    { return fetch_or( __v__ ) | __v__; }

    int operator ^=( int __v__ ) volatile
    { return fetch_xor( __v__ ) ^ __v__; }

    /* Free-function (C-compatible) API. */
    friend void atomic_store_explicit( volatile atomic_int*, int,
                                       memory_order );
    friend int atomic_load_explicit( volatile atomic_int*,
                                     memory_order );
    friend int atomic_swap_explicit( volatile atomic_int*,
                                     int, memory_order );
    friend bool atomic_compare_swap_explicit( volatile atomic_int*,
                                              int*, int, memory_order, memory_order );
    friend void atomic_fence( const volatile atomic_int*, memory_order );
    friend int atomic_fetch_add_explicit( volatile atomic_int*,
                                          int, memory_order );
    friend int atomic_fetch_sub_explicit( volatile atomic_int*,
                                          int, memory_order );
    friend int atomic_fetch_and_explicit( volatile atomic_int*,
                                          int, memory_order );
    friend int atomic_fetch_or_explicit( volatile atomic_int*,
                                         int, memory_order );
    friend int atomic_fetch_xor_explicit( volatile atomic_int*,
                                          int, memory_order );

CPP0X(private:)
#endif
    /* Stored value; after the #endif so plain C sees the same layout. */
    volatile int __f__;
} ALIGN_DEF atomic_int;
00662
00663
/* Atomic unsigned int.  Structurally identical to the other integral
 * atomic_* structs in this file: seq_cst-by-default member operations,
 * split-order and single-order compare_swap overloads, and operator
 * overloads built on the fetch_* primitives (the `fetch_op(v) OP v`
 * pattern in pre-inc/dec and compound assignment yields the updated
 * value, implying fetch_* return the value held before the operation).
 * Definitions live in the atomic_base/*.h header included above.
 * CPP0X() expands to nothing.  Carries ALIGN_DEF: 64-byte alignment on
 * X86, presumably for false-sharing avoidance — confirm. */
typedef struct atomic_uint
{
#ifdef __cplusplus
    bool is_lock_free() const volatile;
    void store( unsigned int,
                memory_order = memory_order_seq_cst ) volatile;
    unsigned int load( memory_order = memory_order_seq_cst ) volatile;
    unsigned int swap( unsigned int,
                       memory_order = memory_order_seq_cst ) volatile;
    bool compare_swap( unsigned int&, unsigned int,
                       memory_order, memory_order ) volatile;
    bool compare_swap( unsigned int&, unsigned int,
                       memory_order = memory_order_seq_cst ) volatile;
    void fence( memory_order ) const volatile;
    unsigned int fetch_add( unsigned int,
                            memory_order = memory_order_seq_cst ) volatile;
    unsigned int fetch_sub( unsigned int,
                            memory_order = memory_order_seq_cst ) volatile;
    unsigned int fetch_and( unsigned int,
                            memory_order = memory_order_seq_cst ) volatile;
    unsigned int fetch_or( unsigned int,
                           memory_order = memory_order_seq_cst ) volatile;
    unsigned int fetch_xor( unsigned int,
                            memory_order = memory_order_seq_cst ) volatile;

    CPP0X( atomic_uint() = default; )
    CPP0X( constexpr atomic_uint( unsigned int __v__ ) : __f__( __v__) { } )
    CPP0X( atomic_uint( const atomic_uint& ) = delete; )
    atomic_uint& operator =( const atomic_uint& ) CPP0X(=delete);

    /* Assignment maps to a seq_cst store. */
    unsigned int operator =( unsigned int __v__ ) volatile
    { store( __v__ ); return __v__; }

    /* Post-forms return the prior value; pre-forms re-apply the delta. */
    unsigned int operator ++( int ) volatile
    { return fetch_add( 1 ); }

    unsigned int operator --( int ) volatile
    { return fetch_sub( 1 ); }

    unsigned int operator ++() volatile
    { return fetch_add( 1 ) + 1; }

    unsigned int operator --() volatile
    { return fetch_sub( 1 ) - 1; }

    unsigned int operator +=( unsigned int __v__ ) volatile
    { return fetch_add( __v__ ) + __v__; }

    unsigned int operator -=( unsigned int __v__ ) volatile
    { return fetch_sub( __v__ ) - __v__; }

    unsigned int operator &=( unsigned int __v__ ) volatile
    { return fetch_and( __v__ ) & __v__; }

    unsigned int operator |=( unsigned int __v__ ) volatile
    { return fetch_or( __v__ ) | __v__; }

    unsigned int operator ^=( unsigned int __v__ ) volatile
    { return fetch_xor( __v__ ) ^ __v__; }

    /* Free-function (C-compatible) API. */
    friend void atomic_store_explicit( volatile atomic_uint*, unsigned int,
                                       memory_order );
    friend unsigned int atomic_load_explicit( volatile atomic_uint*,
                                              memory_order );
    friend unsigned int atomic_swap_explicit( volatile atomic_uint*,
                                              unsigned int, memory_order );
    friend bool atomic_compare_swap_explicit( volatile atomic_uint*,
                                              unsigned int*, unsigned int, memory_order, memory_order );
    friend void atomic_fence( const volatile atomic_uint*, memory_order );
    friend unsigned int atomic_fetch_add_explicit( volatile atomic_uint*,
                                                   unsigned int, memory_order );
    friend unsigned int atomic_fetch_sub_explicit( volatile atomic_uint*,
                                                   unsigned int, memory_order );
    friend unsigned int atomic_fetch_and_explicit( volatile atomic_uint*,
                                                   unsigned int, memory_order );
    friend unsigned int atomic_fetch_or_explicit( volatile atomic_uint*,
                                                  unsigned int, memory_order );
    friend unsigned int atomic_fetch_xor_explicit( volatile atomic_uint*,
                                                   unsigned int, memory_order );

CPP0X(private:)
#endif
    /* Stored value; after the #endif so plain C sees the same layout. */
    volatile unsigned int __f__;
} ALIGN_DEF atomic_uint;
00748
00749
/* Atomic long.  Structurally identical to the other integral atomic_*
 * structs in this file: seq_cst-by-default member operations, split-order
 * and single-order compare_swap overloads, and operator overloads built
 * on the fetch_* primitives (the `fetch_op(v) OP v` pattern in
 * pre-inc/dec and compound assignment yields the updated value, implying
 * fetch_* return the value held before the operation).  Definitions live
 * in the atomic_base/*.h header included above.  CPP0X() expands to
 * nothing.  Carries ALIGN_DEF: 64-byte alignment on X86, presumably for
 * false-sharing avoidance — confirm. */
typedef struct atomic_long
{
#ifdef __cplusplus
    bool is_lock_free() const volatile;
    void store( long,
                memory_order = memory_order_seq_cst ) volatile;
    long load( memory_order = memory_order_seq_cst ) volatile;
    long swap( long,
               memory_order = memory_order_seq_cst ) volatile;
    bool compare_swap( long&, long,
                       memory_order, memory_order ) volatile;
    bool compare_swap( long&, long,
                       memory_order = memory_order_seq_cst ) volatile;
    void fence( memory_order ) const volatile;
    long fetch_add( long,
                    memory_order = memory_order_seq_cst ) volatile;
    long fetch_sub( long,
                    memory_order = memory_order_seq_cst ) volatile;
    long fetch_and( long,
                    memory_order = memory_order_seq_cst ) volatile;
    long fetch_or( long,
                   memory_order = memory_order_seq_cst ) volatile;
    long fetch_xor( long,
                    memory_order = memory_order_seq_cst ) volatile;

    CPP0X( atomic_long() = default; )
    CPP0X( constexpr atomic_long( long __v__ ) : __f__( __v__) { } )
    CPP0X( atomic_long( const atomic_long& ) = delete; )
    atomic_long& operator =( const atomic_long& ) CPP0X(=delete);

    /* Assignment maps to a seq_cst store. */
    long operator =( long __v__ ) volatile
    { store( __v__ ); return __v__; }

    /* Post-forms return the prior value; pre-forms re-apply the delta. */
    long operator ++( int ) volatile
    { return fetch_add( 1 ); }

    long operator --( int ) volatile
    { return fetch_sub( 1 ); }

    long operator ++() volatile
    { return fetch_add( 1 ) + 1; }

    long operator --() volatile
    { return fetch_sub( 1 ) - 1; }

    long operator +=( long __v__ ) volatile
    { return fetch_add( __v__ ) + __v__; }

    long operator -=( long __v__ ) volatile
    { return fetch_sub( __v__ ) - __v__; }

    long operator &=( long __v__ ) volatile
    { return fetch_and( __v__ ) & __v__; }

    long operator |=( long __v__ ) volatile
    { return fetch_or( __v__ ) | __v__; }

    long operator ^=( long __v__ ) volatile
    { return fetch_xor( __v__ ) ^ __v__; }

    /* Free-function (C-compatible) API. */
    friend void atomic_store_explicit( volatile atomic_long*, long,
                                       memory_order );
    friend long atomic_load_explicit( volatile atomic_long*,
                                      memory_order );
    friend long atomic_swap_explicit( volatile atomic_long*,
                                      long, memory_order );
    friend bool atomic_compare_swap_explicit( volatile atomic_long*,
                                              long*, long, memory_order, memory_order );
    friend void atomic_fence( const volatile atomic_long*, memory_order );
    friend long atomic_fetch_add_explicit( volatile atomic_long*,
                                           long, memory_order );
    friend long atomic_fetch_sub_explicit( volatile atomic_long*,
                                           long, memory_order );
    friend long atomic_fetch_and_explicit( volatile atomic_long*,
                                           long, memory_order );
    friend long atomic_fetch_or_explicit( volatile atomic_long*,
                                          long, memory_order );
    friend long atomic_fetch_xor_explicit( volatile atomic_long*,
                                           long, memory_order );

CPP0X(private:)
#endif
    /* Stored value; after the #endif so plain C sees the same layout. */
    volatile long __f__;
} ALIGN_DEF atomic_long;
00834
00835
/* Atomic unsigned long, emulating the C++0x draft (N2427) interface.
 * In C++ the struct carries the full member-function API declared below;
 * in plain C only the raw storage member is visible and the type is used
 * through the atomic_* free functions.  Every operation defaults to
 * memory_order_seq_cst. */
typedef struct atomic_ulong
{
#ifdef __cplusplus
    bool is_lock_free() const volatile;
    void store( unsigned long,
                memory_order = memory_order_seq_cst ) volatile;
    unsigned long load( memory_order = memory_order_seq_cst ) volatile;
    unsigned long swap( unsigned long,
                memory_order = memory_order_seq_cst ) volatile;
    bool compare_swap( unsigned long&, unsigned long,
                memory_order, memory_order ) volatile;
    bool compare_swap( unsigned long&, unsigned long,
                memory_order = memory_order_seq_cst ) volatile;
    void fence( memory_order ) const volatile;
    /* fetch_* return the value held immediately BEFORE the modification. */
    unsigned long fetch_add( unsigned long,
                memory_order = memory_order_seq_cst ) volatile;
    unsigned long fetch_sub( unsigned long,
                memory_order = memory_order_seq_cst ) volatile;
    unsigned long fetch_and( unsigned long,
                memory_order = memory_order_seq_cst ) volatile;
    unsigned long fetch_or( unsigned long,
                memory_order = memory_order_seq_cst ) volatile;
    unsigned long fetch_xor( unsigned long,
                memory_order = memory_order_seq_cst ) volatile;

    /* CPP0X(x) expands to nothing (see top of file), so these C++0x-only
     * declarations are currently compiled out. */
    CPP0X( atomic_ulong() = default; )
    CPP0X( constexpr atomic_ulong( unsigned long __v__ ) : __f__( __v__) { } )
    CPP0X( atomic_ulong( const atomic_ulong& ) = delete; )
    atomic_ulong& operator =( const atomic_ulong& ) CPP0X(=delete);

    /* Assignment stores and returns the assigned value, not *this. */
    unsigned long operator =( unsigned long __v__ ) volatile
    { store( __v__ ); return __v__; }

    /* Post-inc/dec return the OLD value; pre-inc/dec return the NEW one. */
    unsigned long operator ++( int ) volatile
    { return fetch_add( 1 ); }

    unsigned long operator --( int ) volatile
    { return fetch_sub( 1 ); }

    unsigned long operator ++() volatile
    { return fetch_add( 1 ) + 1; }

    unsigned long operator --() volatile
    { return fetch_sub( 1 ) - 1; }

    /* Compound assignment returns the post-operation value, derived from
     * the fetch_* result rather than re-reading the atomic. */
    unsigned long operator +=( unsigned long __v__ ) volatile
    { return fetch_add( __v__ ) + __v__; }

    unsigned long operator -=( unsigned long __v__ ) volatile
    { return fetch_sub( __v__ ) - __v__; }

    unsigned long operator &=( unsigned long __v__ ) volatile
    { return fetch_and( __v__ ) & __v__; }

    unsigned long operator |=( unsigned long __v__ ) volatile
    { return fetch_or( __v__ ) | __v__; }

    unsigned long operator ^=( unsigned long __v__ ) volatile
    { return fetch_xor( __v__ ) ^ __v__; }

    /* Friends so the C-style API can reach __f__ when CPP0X(private:)
     * is enabled. */
    friend void atomic_store_explicit( volatile atomic_ulong*, unsigned long,
                                       memory_order );
    friend unsigned long atomic_load_explicit( volatile atomic_ulong*,
                                               memory_order );
    friend unsigned long atomic_swap_explicit( volatile atomic_ulong*,
                                               unsigned long, memory_order );
    friend bool atomic_compare_swap_explicit( volatile atomic_ulong*,
                unsigned long*, unsigned long, memory_order, memory_order );
    friend void atomic_fence( const volatile atomic_ulong*, memory_order );
    friend unsigned long atomic_fetch_add_explicit( volatile atomic_ulong*,
                                                    unsigned long, memory_order );
    friend unsigned long atomic_fetch_sub_explicit( volatile atomic_ulong*,
                                                    unsigned long, memory_order );
    friend unsigned long atomic_fetch_and_explicit( volatile atomic_ulong*,
                                                    unsigned long, memory_order );
    friend unsigned long atomic_fetch_or_explicit( volatile atomic_ulong*,
                                                   unsigned long, memory_order );
    friend unsigned long atomic_fetch_xor_explicit( volatile atomic_ulong*,
                                                    unsigned long, memory_order );

    CPP0X(private:)
#endif
    /* Raw storage.  ALIGN_DEF gives 64-byte alignment on X86 builds,
     * presumably to keep adjacent atomics on separate cache lines. */
    volatile unsigned long __f__;
} ALIGN_DEF atomic_ulong;
00920
00921
/* Atomic long long: same layout and API pattern as the other atomic
 * integer structs in this header (member API for C++, free functions for
 * C; all memory orders default to seq_cst; fetch_* return the prior
 * value). */
typedef struct atomic_llong
{
#ifdef __cplusplus
    bool is_lock_free() const volatile;
    void store( long long,
                memory_order = memory_order_seq_cst ) volatile;
    long long load( memory_order = memory_order_seq_cst ) volatile;
    long long swap( long long,
                memory_order = memory_order_seq_cst ) volatile;
    bool compare_swap( long long&, long long,
                memory_order, memory_order ) volatile;
    bool compare_swap( long long&, long long,
                memory_order = memory_order_seq_cst ) volatile;
    void fence( memory_order ) const volatile;
    long long fetch_add( long long,
                memory_order = memory_order_seq_cst ) volatile;
    long long fetch_sub( long long,
                memory_order = memory_order_seq_cst ) volatile;
    long long fetch_and( long long,
                memory_order = memory_order_seq_cst ) volatile;
    long long fetch_or( long long,
                memory_order = memory_order_seq_cst ) volatile;
    long long fetch_xor( long long,
                memory_order = memory_order_seq_cst ) volatile;

    /* CPP0X(...) declarations are compiled out pre-C++0x. */
    CPP0X( atomic_llong() = default; )
    CPP0X( constexpr atomic_llong( long long __v__ ) : __f__( __v__) { } )
    CPP0X( atomic_llong( const atomic_llong& ) = delete; )
    atomic_llong& operator =( const atomic_llong& ) CPP0X(=delete);

    /* Assignment stores and returns the assigned value. */
    long long operator =( long long __v__ ) volatile
    { store( __v__ ); return __v__; }

    /* Post-forms return the old value; pre-forms the new value. */
    long long operator ++( int ) volatile
    { return fetch_add( 1 ); }

    long long operator --( int ) volatile
    { return fetch_sub( 1 ); }

    long long operator ++() volatile
    { return fetch_add( 1 ) + 1; }

    long long operator --() volatile
    { return fetch_sub( 1 ) - 1; }

    /* Compound assignment returns the post-operation value. */
    long long operator +=( long long __v__ ) volatile
    { return fetch_add( __v__ ) + __v__; }

    long long operator -=( long long __v__ ) volatile
    { return fetch_sub( __v__ ) - __v__; }

    long long operator &=( long long __v__ ) volatile
    { return fetch_and( __v__ ) & __v__; }

    long long operator |=( long long __v__ ) volatile
    { return fetch_or( __v__ ) | __v__; }

    long long operator ^=( long long __v__ ) volatile
    { return fetch_xor( __v__ ) ^ __v__; }

    /* Friends so the C-style API can reach __f__ when CPP0X(private:)
     * is enabled. */
    friend void atomic_store_explicit( volatile atomic_llong*, long long,
                                       memory_order );
    friend long long atomic_load_explicit( volatile atomic_llong*,
                                           memory_order );
    friend long long atomic_swap_explicit( volatile atomic_llong*,
                                           long long, memory_order );
    friend bool atomic_compare_swap_explicit( volatile atomic_llong*,
                long long*, long long, memory_order, memory_order );
    friend void atomic_fence( const volatile atomic_llong*, memory_order );
    friend long long atomic_fetch_add_explicit( volatile atomic_llong*,
                                                long long, memory_order );
    friend long long atomic_fetch_sub_explicit( volatile atomic_llong*,
                                                long long, memory_order );
    friend long long atomic_fetch_and_explicit( volatile atomic_llong*,
                                                long long, memory_order );
    friend long long atomic_fetch_or_explicit( volatile atomic_llong*,
                                               long long, memory_order );
    friend long long atomic_fetch_xor_explicit( volatile atomic_llong*,
                                                long long, memory_order );

    CPP0X(private:)
#endif
    volatile long long __f__;   /* raw storage; ALIGN_DEF pads on X86 */
}ALIGN_DEF atomic_llong;
01006
01007
/* Atomic unsigned long long: same layout and API pattern as the other
 * atomic integer structs in this header. */
typedef struct atomic_ullong
{
#ifdef __cplusplus
    bool is_lock_free() const volatile;
    void store( unsigned long long,
                memory_order = memory_order_seq_cst ) volatile;
    unsigned long long load( memory_order = memory_order_seq_cst ) volatile;
    unsigned long long swap( unsigned long long,
                memory_order = memory_order_seq_cst ) volatile;
    bool compare_swap( unsigned long long&, unsigned long long,
                memory_order, memory_order ) volatile;
    bool compare_swap( unsigned long long&, unsigned long long,
                memory_order = memory_order_seq_cst ) volatile;
    void fence( memory_order ) const volatile;
    /* fetch_* return the value held immediately BEFORE the modification. */
    unsigned long long fetch_add( unsigned long long,
                memory_order = memory_order_seq_cst ) volatile;
    unsigned long long fetch_sub( unsigned long long,
                memory_order = memory_order_seq_cst ) volatile;
    unsigned long long fetch_and( unsigned long long,
                memory_order = memory_order_seq_cst ) volatile;
    unsigned long long fetch_or( unsigned long long,
                memory_order = memory_order_seq_cst ) volatile;
    unsigned long long fetch_xor( unsigned long long,
                memory_order = memory_order_seq_cst ) volatile;

    /* CPP0X(...) declarations are compiled out pre-C++0x. */
    CPP0X( atomic_ullong() = default; )
    CPP0X( constexpr atomic_ullong( unsigned long long __v__ ) : __f__( __v__) { } )
    CPP0X( atomic_ullong( const atomic_ullong& ) = delete; )
    atomic_ullong& operator =( const atomic_ullong& ) CPP0X(=delete);

    /* Assignment stores and returns the assigned value. */
    unsigned long long operator =( unsigned long long __v__ ) volatile
    { store( __v__ ); return __v__; }

    /* Post-forms return the old value; pre-forms the new value. */
    unsigned long long operator ++( int ) volatile
    { return fetch_add( 1 ); }

    unsigned long long operator --( int ) volatile
    { return fetch_sub( 1 ); }

    unsigned long long operator ++() volatile
    { return fetch_add( 1 ) + 1; }

    unsigned long long operator --() volatile
    { return fetch_sub( 1 ) - 1; }

    /* Compound assignment returns the post-operation value. */
    unsigned long long operator +=( unsigned long long __v__ ) volatile
    { return fetch_add( __v__ ) + __v__; }

    unsigned long long operator -=( unsigned long long __v__ ) volatile
    { return fetch_sub( __v__ ) - __v__; }

    unsigned long long operator &=( unsigned long long __v__ ) volatile
    { return fetch_and( __v__ ) & __v__; }

    unsigned long long operator |=( unsigned long long __v__ ) volatile
    { return fetch_or( __v__ ) | __v__; }

    unsigned long long operator ^=( unsigned long long __v__ ) volatile
    { return fetch_xor( __v__ ) ^ __v__; }

    /* Friends so the C-style API can reach __f__ when CPP0X(private:)
     * is enabled. */
    friend void atomic_store_explicit( volatile atomic_ullong*, unsigned long long,
                                       memory_order );
    friend unsigned long long atomic_load_explicit( volatile atomic_ullong*,
                                                    memory_order );
    friend unsigned long long atomic_swap_explicit( volatile atomic_ullong*,
                                                    unsigned long long, memory_order );
    friend bool atomic_compare_swap_explicit( volatile atomic_ullong*,
                unsigned long long*, unsigned long long, memory_order, memory_order );
    friend void atomic_fence( const volatile atomic_ullong*, memory_order );
    friend unsigned long long atomic_fetch_add_explicit( volatile atomic_ullong*,
                                                         unsigned long long, memory_order );
    friend unsigned long long atomic_fetch_sub_explicit( volatile atomic_ullong*,
                                                         unsigned long long, memory_order );
    friend unsigned long long atomic_fetch_and_explicit( volatile atomic_ullong*,
                                                         unsigned long long, memory_order );
    friend unsigned long long atomic_fetch_or_explicit( volatile atomic_ullong*,
                                                        unsigned long long, memory_order );
    friend unsigned long long atomic_fetch_xor_explicit( volatile atomic_ullong*,
                                                         unsigned long long, memory_order );

    CPP0X(private:)
#endif
    volatile unsigned long long __f__;   /* raw storage; ALIGN_DEF pads on X86 */
}ALIGN_DEF atomic_ullong;
01092
01093
/* <stdint.h>-style atomic typedef families, mapped onto the basic atomic
 * integer structs defined above. */
typedef atomic_schar atomic_int_least8_t;
typedef atomic_uchar atomic_uint_least8_t;
typedef atomic_short atomic_int_least16_t;
typedef atomic_ushort atomic_uint_least16_t;
typedef atomic_int atomic_int_least32_t;
typedef atomic_uint atomic_uint_least32_t;
typedef atomic_llong atomic_int_least64_t;
typedef atomic_ullong atomic_uint_least64_t;

/* The "fast" aliases simply reuse the least-width mappings. */
typedef atomic_schar atomic_int_fast8_t;
typedef atomic_uchar atomic_uint_fast8_t;
typedef atomic_short atomic_int_fast16_t;
typedef atomic_ushort atomic_uint_fast16_t;
typedef atomic_int atomic_int_fast32_t;
typedef atomic_uint atomic_uint_fast32_t;
typedef atomic_llong atomic_int_fast64_t;
typedef atomic_ullong atomic_uint_fast64_t;

/* NOTE(review): the pointer/size aliases below assume long is pointer-
 * sized (true for ILP32/LP64; false for LLP64 targets such as 64-bit
 * Windows) -- verify before porting to such a target. */
typedef atomic_long atomic_intptr_t;
typedef atomic_ulong atomic_uintptr_t;

typedef atomic_long atomic_ssize_t;
typedef atomic_ulong atomic_size_t;

typedef atomic_long atomic_ptrdiff_t;

typedef atomic_llong atomic_intmax_t;
typedef atomic_ullong atomic_uintmax_t;
01122
01123
01124 #ifdef __cplusplus
01125
01126
/* Atomic wchar_t (C++ builds only -- the surrounding #ifdef __cplusplus
 * selects a plain typedef instead for C builds).  Same API pattern as the
 * atomic integer structs above. */
typedef struct atomic_wchar_t
{
#ifdef __cplusplus   /* redundant (this branch is already C++-only); kept
                        for symmetry with the other atomic structs */
    bool is_lock_free() const volatile;
    void store( wchar_t, memory_order = memory_order_seq_cst ) volatile;
    wchar_t load( memory_order = memory_order_seq_cst ) volatile;
    wchar_t swap( wchar_t,
                memory_order = memory_order_seq_cst ) volatile;
    bool compare_swap( wchar_t&, wchar_t,
                memory_order, memory_order ) volatile;
    bool compare_swap( wchar_t&, wchar_t,
                memory_order = memory_order_seq_cst ) volatile;
    void fence( memory_order ) const volatile;
    /* fetch_* return the value held immediately BEFORE the modification. */
    wchar_t fetch_add( wchar_t,
                memory_order = memory_order_seq_cst ) volatile;
    wchar_t fetch_sub( wchar_t,
                memory_order = memory_order_seq_cst ) volatile;
    wchar_t fetch_and( wchar_t,
                memory_order = memory_order_seq_cst ) volatile;
    wchar_t fetch_or( wchar_t,
                memory_order = memory_order_seq_cst ) volatile;
    wchar_t fetch_xor( wchar_t,
                memory_order = memory_order_seq_cst ) volatile;

    /* CPP0X(...) declarations are compiled out pre-C++0x. */
    CPP0X( atomic_wchar_t() = default; )
    CPP0X( constexpr atomic_wchar_t( wchar_t __v__ ) : __f__( __v__) { } )
    CPP0X( atomic_wchar_t( const atomic_wchar_t& ) = delete; )
    atomic_wchar_t& operator =( const atomic_wchar_t& ) CPP0X(=delete);

    /* Assignment stores and returns the assigned value. */
    wchar_t operator =( wchar_t __v__ ) volatile
    { store( __v__ ); return __v__; }

    /* Post-forms return the old value; pre-forms the new value. */
    wchar_t operator ++( int ) volatile
    { return fetch_add( 1 ); }

    wchar_t operator --( int ) volatile
    { return fetch_sub( 1 ); }

    wchar_t operator ++() volatile
    { return fetch_add( 1 ) + 1; }

    wchar_t operator --() volatile
    { return fetch_sub( 1 ) - 1; }

    /* Compound assignment returns the post-operation value. */
    wchar_t operator +=( wchar_t __v__ ) volatile
    { return fetch_add( __v__ ) + __v__; }

    wchar_t operator -=( wchar_t __v__ ) volatile
    { return fetch_sub( __v__ ) - __v__; }

    wchar_t operator &=( wchar_t __v__ ) volatile
    { return fetch_and( __v__ ) & __v__; }

    wchar_t operator |=( wchar_t __v__ ) volatile
    { return fetch_or( __v__ ) | __v__; }

    wchar_t operator ^=( wchar_t __v__ ) volatile
    { return fetch_xor( __v__ ) ^ __v__; }

    /* Friends so the C-style API can reach __f__ when CPP0X(private:)
     * is enabled. */
    friend void atomic_store_explicit( volatile atomic_wchar_t*, wchar_t,
                                       memory_order );
    friend wchar_t atomic_load_explicit( volatile atomic_wchar_t*,
                                         memory_order );
    friend wchar_t atomic_swap_explicit( volatile atomic_wchar_t*,
                                         wchar_t, memory_order );
    friend bool atomic_compare_swap_explicit( volatile atomic_wchar_t*,
                wchar_t*, wchar_t, memory_order, memory_order );
    friend void atomic_fence( const volatile atomic_wchar_t*, memory_order );
    friend wchar_t atomic_fetch_add_explicit( volatile atomic_wchar_t*,
                                              wchar_t, memory_order );
    friend wchar_t atomic_fetch_sub_explicit( volatile atomic_wchar_t*,
                                              wchar_t, memory_order );
    friend wchar_t atomic_fetch_and_explicit( volatile atomic_wchar_t*,
                                              wchar_t, memory_order );
    friend wchar_t atomic_fetch_or_explicit( volatile atomic_wchar_t*,
                                             wchar_t, memory_order );
    friend wchar_t atomic_fetch_xor_explicit( volatile atomic_wchar_t*,
                                              wchar_t, memory_order );

    CPP0X(private:)
#endif
    volatile wchar_t __f__;   /* raw storage; ALIGN_DEF pads on X86 */
}ALIGN_DEF atomic_wchar_t;
01210
01211
01212 #else
01213
/* Plain-C builds: no atomic_wchar_t struct exists, so map the character
 * atomics onto least-width integer atomics of matching size.
 * NOTE(review): wchar_t is 16-bit on some platforms (e.g. Windows) --
 * confirm the 32-bit mapping is appropriate for all supported targets. */
typedef atomic_int_least16_t atomic_char16_t;
typedef atomic_int_least32_t atomic_char32_t;
typedef atomic_int_least32_t atomic_wchar_t;
01217
01218 #endif
01219
01220
01221 #ifdef __cplusplus
01222
/* Primary template: generic atomic wrapper providing only the minimal
 * operation set (load/store/swap/compare_swap/fence).  Integral and
 * pointer types get richer specializations later in this header. */
template< typename T >
struct atomic
{
#ifdef __cplusplus   /* redundant: this region is already C++-only */

    bool is_lock_free() const volatile;
    void store( T, memory_order = memory_order_seq_cst ) volatile;
    T load( memory_order = memory_order_seq_cst ) volatile;
    T swap( T __v__, memory_order = memory_order_seq_cst ) volatile;
    bool compare_swap( T&, T, memory_order, memory_order ) volatile;
    bool compare_swap( T&, T, memory_order = memory_order_seq_cst ) volatile;
    void fence( memory_order ) const volatile;

    /* CPP0X(...) declarations are compiled out pre-C++0x. */
    CPP0X( atomic() = default; )
    CPP0X( constexpr explicit atomic( T __v__ ) : __f__( __v__ ) { } )
    CPP0X( atomic( const atomic& ) = delete; )
    atomic& operator =( const atomic& ) CPP0X(=delete);

    /* Assignment stores and returns the assigned value, not *this. */
    T operator =( T __v__ ) volatile
    { store( __v__ ); return __v__; }

    CPP0X(private:)
#endif
    volatile T __f__;   /* the wrapped storage */
}ALIGN_DEF;
01248
01249 #endif
01250
01251 #ifdef __cplusplus
01252
01253 template<typename T> struct atomic< T* > : atomic_address
01254 {
01255 T* load( memory_order = memory_order_seq_cst ) const volatile;
01256 T* swap( T*, memory_order = memory_order_seq_cst ) volatile;
01257 bool compare_swap( T*&, T*, memory_order, memory_order ) volatile;
01258 bool compare_swap( T*&, T*,
01259 memory_order = memory_order_seq_cst ) volatile;
01260 T* fetch_add( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
01261 T* fetch_sub( ptrdiff_t, memory_order = memory_order_seq_cst ) volatile;
01262
01263 CPP0X( atomic() = default; )
01264 CPP0X( constexpr explicit atomic( T __v__ ) : atomic_address( __v__ ) { } )
01265 CPP0X( atomic( const atomic& ) = delete; )
01266 atomic& operator =( const atomic& ) CPP0X(=delete);
01267
01268 T* operator =( T* __v__ ) volatile
01269 { store( __v__ ); return __v__; }
01270
01271 T* operator ++( int ) volatile
01272 { return fetch_add( 1 ); }
01273
01274 T* operator --( int ) volatile
01275 { return fetch_sub( 1 ); }
01276
01277 T* operator ++() volatile
01278 { return fetch_add( 1 ) + 1; }
01279
01280 T* operator --() volatile
01281 { return fetch_sub( 1 ) - 1; }
01282
01283 T* operator +=( T* __v__ ) volatile
01284 { return fetch_add( __v__ ) + __v__; }
01285
01286 T* operator -=( T* __v__ ) volatile
01287 { return fetch_sub( __v__ ) - __v__; }
01288 }ALIGN_DEF;
01289
01290 #endif
01291
01292 #ifdef __cplusplus
01293
01294
/* Full specializations: each derives from the matching concrete atomic
 * struct and only re-exposes construction (C++0x only) and assignment,
 * which stores and returns the assigned value. */
template<> struct atomic< bool > : atomic_bool
{
    CPP0X( atomic() = default; )
    CPP0X( constexpr explicit atomic( bool __v__ )
        : atomic_bool( __v__ ) { } )
    CPP0X( atomic( const atomic& ) = delete; )
    atomic& operator =( const atomic& ) CPP0X(=delete);

    bool operator =( bool __v__ ) volatile
    { store( __v__ ); return __v__; }
};


template<> struct atomic< void* > : atomic_address
{
    CPP0X( atomic() = default; )
    CPP0X( constexpr explicit atomic( void* __v__ )
        : atomic_address( __v__ ) { } )
    CPP0X( atomic( const atomic& ) = delete; )
    atomic& operator =( const atomic& ) CPP0X(=delete);

    void* operator =( void* __v__ ) volatile
    { store( __v__ ); return __v__; }
};
01319
01320
/* Full specializations for the narrow/medium integer types; identical
 * boilerplate pattern: inherit the concrete atomic struct, re-expose
 * construction (C++0x only) and value-returning assignment. */
template<> struct atomic< char > : atomic_char
{
    CPP0X( atomic() = default; )
    CPP0X( constexpr explicit atomic( char __v__ )
        : atomic_char( __v__ ) { } )
    CPP0X( atomic( const atomic& ) = delete; )
    atomic& operator =( const atomic& ) CPP0X(=delete);

    char operator =( char __v__ ) volatile
    { store( __v__ ); return __v__; }
};


template<> struct atomic< signed char > : atomic_schar
{
    CPP0X( atomic() = default; )
    CPP0X( constexpr explicit atomic( signed char __v__ )
        : atomic_schar( __v__ ) { } )
    CPP0X( atomic( const atomic& ) = delete; )
    atomic& operator =( const atomic& ) CPP0X(=delete);

    signed char operator =( signed char __v__ ) volatile
    { store( __v__ ); return __v__; }
};


template<> struct atomic< unsigned char > : atomic_uchar
{
    CPP0X( atomic() = default; )
    CPP0X( constexpr explicit atomic( unsigned char __v__ )
        : atomic_uchar( __v__ ) { } )
    CPP0X( atomic( const atomic& ) = delete; )
    atomic& operator =( const atomic& ) CPP0X(=delete);

    unsigned char operator =( unsigned char __v__ ) volatile
    { store( __v__ ); return __v__; }
};


template<> struct atomic< short > : atomic_short
{
    CPP0X( atomic() = default; )
    CPP0X( constexpr explicit atomic( short __v__ )
        : atomic_short( __v__ ) { } )
    CPP0X( atomic( const atomic& ) = delete; )
    atomic& operator =( const atomic& ) CPP0X(=delete);

    short operator =( short __v__ ) volatile
    { store( __v__ ); return __v__; }
};


template<> struct atomic< unsigned short > : atomic_ushort
{
    CPP0X( atomic() = default; )
    CPP0X( constexpr explicit atomic( unsigned short __v__ )
        : atomic_ushort( __v__ ) { } )
    CPP0X( atomic( const atomic& ) = delete; )
    atomic& operator =( const atomic& ) CPP0X(=delete);

    unsigned short operator =( unsigned short __v__ ) volatile
    { store( __v__ ); return __v__; }
};


template<> struct atomic< int > : atomic_int
{
    CPP0X( atomic() = default; )
    CPP0X( constexpr explicit atomic( int __v__ )
        : atomic_int( __v__ ) { } )
    CPP0X( atomic( const atomic& ) = delete; )
    atomic& operator =( const atomic& ) CPP0X(=delete);

    int operator =( int __v__ ) volatile
    { store( __v__ ); return __v__; }
};
01397
01398
/* Full specializations for the wide integer types and wchar_t; same
 * boilerplate pattern as the specializations above. */
template<> struct atomic< unsigned int > : atomic_uint
{
    CPP0X( atomic() = default; )
    CPP0X( constexpr explicit atomic( unsigned int __v__ )
        : atomic_uint( __v__ ) { } )
    CPP0X( atomic( const atomic& ) = delete; )
    atomic& operator =( const atomic& ) CPP0X(=delete);

    unsigned int operator =( unsigned int __v__ ) volatile
    { store( __v__ ); return __v__; }
};


template<> struct atomic< long > : atomic_long
{
    CPP0X( atomic() = default; )
    CPP0X( constexpr explicit atomic( long __v__ )
        : atomic_long( __v__ ) { } )
    CPP0X( atomic( const atomic& ) = delete; )
    atomic& operator =( const atomic& ) CPP0X(=delete);

    long operator =( long __v__ ) volatile
    { store( __v__ ); return __v__; }
};


template<> struct atomic< unsigned long > : atomic_ulong
{
    CPP0X( atomic() = default; )
    CPP0X( constexpr explicit atomic( unsigned long __v__ )
        : atomic_ulong( __v__ ) { } )
    CPP0X( atomic( const atomic& ) = delete; )
    atomic& operator =( const atomic& ) CPP0X(=delete);

    unsigned long operator =( unsigned long __v__ ) volatile
    { store( __v__ ); return __v__; }
};


template<> struct atomic< long long > : atomic_llong
{
    CPP0X( atomic() = default; )
    CPP0X( constexpr explicit atomic( long long __v__ )
        : atomic_llong( __v__ ) { } )
    CPP0X( atomic( const atomic& ) = delete; )
    atomic& operator =( const atomic& ) CPP0X(=delete);

    long long operator =( long long __v__ ) volatile
    { store( __v__ ); return __v__; }
};


template<> struct atomic< unsigned long long > : atomic_ullong
{
    CPP0X( atomic() = default; )
    CPP0X( constexpr explicit atomic( unsigned long long __v__ )
        : atomic_ullong( __v__ ) { } )
    CPP0X( atomic( const atomic& ) = delete; )
    atomic& operator =( const atomic& ) CPP0X(=delete);

    unsigned long long operator =( unsigned long long __v__ ) volatile
    { store( __v__ ); return __v__; }
};


template<> struct atomic< wchar_t > : atomic_wchar_t
{
    CPP0X( atomic() = default; )
    CPP0X( constexpr explicit atomic( wchar_t __v__ )
        : atomic_wchar_t( __v__ ) { } )
    CPP0X( atomic( const atomic& ) = delete; )
    atomic& operator =( const atomic& ) CPP0X(=delete);

    wchar_t operator =( wchar_t __v__ ) volatile
    { store( __v__ ); return __v__; }
};
01475
01476
01477 #endif
01478
01479
01480 #ifdef __cplusplus
01481
/* ---- C-style free functions for atomic_bool -------------------------- */

/* NOTE(review): reports lock-freedom unconditionally; confirm this holds
 * for every atomic_base backend included above. */
inline bool atomic_is_lock_free( const volatile atomic_bool* __a__ )
{ return true; }

inline bool atomic_load_explicit( volatile atomic_bool* __a__, memory_order __x__ )
{
    return _ATOMIC_LOAD_( __a__, __x__ );
}

/* The non-_explicit variants default to sequential consistency. */
inline bool atomic_load( volatile atomic_bool* __a__ )
{ return atomic_load_explicit( __a__, memory_order_seq_cst ); }

inline void atomic_store_explicit
( volatile atomic_bool* __a__, bool __m__, memory_order __x__ )
{ _ATOMIC_STORE_( __a__, __m__, __x__ ); }

inline void atomic_store
( volatile atomic_bool* __a__, bool __m__ )
{ atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }

/* Atomically replace *__a__ with __m__, returning the prior value. */
inline bool atomic_swap_explicit
( volatile atomic_bool* __a__, bool __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, ops_swap, __m__, __x__ ); }

inline bool atomic_swap
( volatile atomic_bool* __a__, bool __m__ )
{ return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }

/* CAS.  Note the failure ordering __y__ is accepted for interface
 * compatibility but not passed to the underlying macro (only __x__ is). */
inline bool atomic_compare_swap_explicit
( volatile atomic_bool* __a__, bool* __e__, bool __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_swap
( volatile atomic_bool* __a__, bool* __e__, bool __m__ )
{ return atomic_compare_swap_explicit( __a__, __e__, __m__,
        memory_order_seq_cst, memory_order_seq_cst ); }

inline void atomic_fence
( const volatile atomic_bool* __a__, memory_order __x__ )
{ _ATOMIC_FENCE_( __a__, __x__ ); }
01522
01523
/* ---- C-style free functions for atomic_address ----------------------- */

/* NOTE(review): reports lock-freedom unconditionally; confirm per backend. */
inline bool atomic_is_lock_free( const volatile atomic_address* __a__ )
{ return true; }

/* Accepts a const-qualified object but casts constness away because the
 * underlying load macro expects a mutable operand. */
inline void* atomic_load_explicit
( const volatile atomic_address* const __a__, memory_order __x__ )
{ return _ATOMIC_LOAD_((volatile atomic_address * ) __a__, __x__ ); }

inline void* atomic_load( volatile atomic_address* __a__ )
{ return atomic_load_explicit( __a__, memory_order_seq_cst ); }

inline void atomic_store_explicit
( volatile atomic_address* __a__, void* __m__, memory_order __x__ )
{ _ATOMIC_STORE_( __a__, __m__, __x__ ); }

inline void atomic_store
( volatile atomic_address* __a__, void* __m__ )
{ atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
01542 inline void* atomic_swap_explicit
01543 ( volatile atomic_address* __a__, void* __m__, memory_order __x__ )
01544 {
01545 void * old;
01546 do{
01547 old = __a__->load(memory_order_relaxed);
01548 }while(__a__->compare_swap(old, __m__));
01549 return old;
01550 }
01551
/* seq_cst convenience wrapper over atomic_swap_explicit. */
inline void* atomic_swap
( volatile atomic_address* __a__, void* __m__ )
{ return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }

/* CAS.  The failure ordering __y__ is accepted for interface
 * compatibility but not passed to the underlying macro (only __x__ is). */
inline bool atomic_compare_swap_explicit
( volatile atomic_address* __a__, void** __e__, void* __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_swap
( volatile atomic_address* __a__, void** __e__, void* __m__ )
{ return atomic_compare_swap_explicit( __a__, __e__, __m__,
        memory_order_seq_cst, memory_order_seq_cst ); }

inline void atomic_fence
( const volatile atomic_address* __a__, memory_order __x__ )
{ _ATOMIC_FENCE_( __a__, __x__ ); }
01569
01570
/* ---- C-style free functions for atomic_char -------------------------- */
/* Each family below follows the same pattern: _explicit variants forward
 * the caller's memory_order to the atomic_base macros; the plain variants
 * default to memory_order_seq_cst.  The CAS _explicit variants accept a
 * failure ordering __y__ for interface compatibility but only pass __x__
 * to the underlying macro. */

/* NOTE(review): lock-freedom is reported unconditionally for all types;
 * confirm per backend. */
inline bool atomic_is_lock_free( const volatile atomic_char* __a__ )
{ return true; }

inline char atomic_load_explicit
( volatile atomic_char* __a__, memory_order __x__ )
{ return _ATOMIC_LOAD_( __a__, __x__ ); }

inline char atomic_load( volatile atomic_char* __a__ )
{ return atomic_load_explicit( __a__, memory_order_seq_cst ); }

inline void atomic_store_explicit
( volatile atomic_char* __a__, char __m__, memory_order __x__ )
{ _ATOMIC_STORE_( __a__, __m__, __x__ ); }

inline void atomic_store
( volatile atomic_char* __a__, char __m__ )
{ atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }

inline char atomic_swap_explicit
( volatile atomic_char* __a__, char __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, ops_swap, __m__, __x__ ); }

inline char atomic_swap
( volatile atomic_char* __a__, char __m__ )
{ return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }

inline bool atomic_compare_swap_explicit
( volatile atomic_char* __a__, char* __e__, char __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_swap
( volatile atomic_char* __a__, char* __e__, char __m__ )
{ return atomic_compare_swap_explicit( __a__, __e__, __m__,
        memory_order_seq_cst, memory_order_seq_cst ); }

inline void atomic_fence
( const volatile atomic_char* __a__, memory_order __x__ )
{ _ATOMIC_FENCE_( __a__, __x__ ); }


/* ---- C-style free functions for atomic_schar ------------------------- */

inline bool atomic_is_lock_free( const volatile atomic_schar* __a__ )
{ return true; }

inline signed char atomic_load_explicit
( volatile atomic_schar* __a__, memory_order __x__ )
{ return _ATOMIC_LOAD_( __a__, __x__ ); }

inline signed char atomic_load( volatile atomic_schar* __a__ )
{ return atomic_load_explicit( __a__, memory_order_seq_cst ); }

inline void atomic_store_explicit
( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
{ _ATOMIC_STORE_( __a__, __m__, __x__ ); }

inline void atomic_store
( volatile atomic_schar* __a__, signed char __m__ )
{ atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }

inline signed char atomic_swap_explicit
( volatile atomic_schar* __a__, signed char __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, ops_swap, __m__, __x__ ); }

inline signed char atomic_swap
( volatile atomic_schar* __a__, signed char __m__ )
{ return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }

inline bool atomic_compare_swap_explicit
( volatile atomic_schar* __a__, signed char* __e__, signed char __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_swap
( volatile atomic_schar* __a__, signed char* __e__, signed char __m__ )
{ return atomic_compare_swap_explicit( __a__, __e__, __m__,
        memory_order_seq_cst, memory_order_seq_cst ); }

inline void atomic_fence
( const volatile atomic_schar* __a__, memory_order __x__ )
{ _ATOMIC_FENCE_( __a__, __x__ ); }


/* ---- C-style free functions for atomic_uchar ------------------------- */

inline bool atomic_is_lock_free( const volatile atomic_uchar* __a__ )
{ return true; }

inline unsigned char atomic_load_explicit
( volatile atomic_uchar* __a__, memory_order __x__ )
{ return _ATOMIC_LOAD_( __a__, __x__ ); }

inline unsigned char atomic_load( volatile atomic_uchar* __a__ )
{ return atomic_load_explicit( __a__, memory_order_seq_cst ); }

inline void atomic_store_explicit
( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
{ _ATOMIC_STORE_( __a__, __m__, __x__ ); }

inline void atomic_store
( volatile atomic_uchar* __a__, unsigned char __m__ )
{ atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }

inline unsigned char atomic_swap_explicit
( volatile atomic_uchar* __a__, unsigned char __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, ops_swap, __m__, __x__ ); }

inline unsigned char atomic_swap
( volatile atomic_uchar* __a__, unsigned char __m__ )
{ return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }

inline bool atomic_compare_swap_explicit
( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }

inline bool atomic_compare_swap
( volatile atomic_uchar* __a__, unsigned char* __e__, unsigned char __m__ )
{ return atomic_compare_swap_explicit( __a__, __e__, __m__,
        memory_order_seq_cst, memory_order_seq_cst ); }

inline void atomic_fence
( const volatile atomic_uchar* __a__, memory_order __x__ )
{ _ATOMIC_FENCE_( __a__, __x__ ); }


/* ---- C-style free functions for atomic_short ------------------------- */

inline bool atomic_is_lock_free( const volatile atomic_short* __a__ )
{ return true; }

inline short atomic_load_explicit
( volatile atomic_short* __a__, memory_order __x__ )
{ return _ATOMIC_LOAD_( __a__, __x__ ); }

inline short atomic_load( volatile atomic_short* __a__ )
{ return atomic_load_explicit( __a__, memory_order_seq_cst ); }

inline void atomic_store_explicit
( volatile atomic_short* __a__, short __m__, memory_order __x__ )
{ _ATOMIC_STORE_( __a__, __m__, __x__ ); }

inline void atomic_store
( volatile atomic_short* __a__, short __m__ )
{ atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }

inline short atomic_swap_explicit
( volatile atomic_short* __a__, short __m__, memory_order __x__ )
{ return _ATOMIC_MODIFY_( __a__, ops_swap, __m__, __x__ ); }

inline short atomic_swap
( volatile atomic_short* __a__, short __m__ )
{ return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }

inline bool atomic_compare_swap_explicit
( volatile atomic_short* __a__, short* __e__, short __m__,
  memory_order __x__, memory_order __y__ )
{ return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
01724
01725 inline bool atomic_compare_swap
01726 ( volatile atomic_short* __a__, short* __e__, short __m__ )
01727 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
01728 memory_order_seq_cst, memory_order_seq_cst ); }
01729
01730 inline void atomic_fence
01731 ( const volatile atomic_short* __a__, memory_order __x__ )
01732 { _ATOMIC_FENCE_( __a__, __x__ ); }
01733
01734
01735 inline bool atomic_is_lock_free( const volatile atomic_ushort* __a__ )
01736 { return true; }
01737
01738 inline unsigned short atomic_load_explicit
01739 ( volatile atomic_ushort* __a__, memory_order __x__ )
01740 { return _ATOMIC_LOAD_( __a__, __x__ ); }
01741
01742 inline unsigned short atomic_load( volatile atomic_ushort* __a__ )
01743 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
01744
01745 inline void atomic_store_explicit
01746 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
01747 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
01748
01749 inline void atomic_store
01750 ( volatile atomic_ushort* __a__, unsigned short __m__ )
01751 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
01752
01753 inline unsigned short atomic_swap_explicit
01754 ( volatile atomic_ushort* __a__, unsigned short __m__, memory_order __x__ )
01755 { return _ATOMIC_MODIFY_( __a__, ops_swap, __m__, __x__ ); }
01756
01757 inline unsigned short atomic_swap
01758 ( volatile atomic_ushort* __a__, unsigned short __m__ )
01759 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
01760
01761 inline bool atomic_compare_swap_explicit
01762 ( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__,
01763 memory_order __x__, memory_order __y__ )
01764 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
01765
01766 inline bool atomic_compare_swap
01767 ( volatile atomic_ushort* __a__, unsigned short* __e__, unsigned short __m__ )
01768 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
01769 memory_order_seq_cst, memory_order_seq_cst ); }
01770
01771 inline void atomic_fence
01772 ( const volatile atomic_ushort* __a__, memory_order __x__ )
01773 { _ATOMIC_FENCE_( __a__, __x__ ); }
01774
01775
01776 inline bool atomic_is_lock_free( const volatile atomic_int* __a__ )
01777 { return true; }
01778
01779 inline int atomic_load_explicit
01780 ( volatile atomic_int* __a__, memory_order __x__ )
01781 { return _ATOMIC_LOAD_( __a__, __x__ ); }
01782
01783 inline int atomic_load( volatile atomic_int* __a__ )
01784 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
01785
01786 inline void atomic_store_explicit
01787 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
01788 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
01789
01790 inline void atomic_store
01791 ( volatile atomic_int* __a__, int __m__ )
01792 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
01793
01794 inline int atomic_swap_explicit
01795 ( volatile atomic_int* __a__, int __m__, memory_order __x__ )
01796 { return _ATOMIC_MODIFY_( __a__, ops_swap, __m__, __x__ ); }
01797
01798 inline int atomic_swap
01799 ( volatile atomic_int* __a__, int __m__ )
01800 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
01801
01802 inline bool atomic_compare_swap_explicit
01803 ( volatile atomic_int* __a__, int* __e__, int __m__,
01804 memory_order __x__, memory_order __y__ )
01805 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
01806
01807 inline bool atomic_compare_swap
01808 ( volatile atomic_int* __a__, int* __e__, int __m__ )
01809 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
01810 memory_order_seq_cst, memory_order_seq_cst ); }
01811
01812 inline void atomic_fence
01813 ( const volatile atomic_int* __a__, memory_order __x__ )
01814 { _ATOMIC_FENCE_( __a__, __x__ ); }
01815
01816
01817 inline bool atomic_is_lock_free( const volatile atomic_uint* __a__ )
01818 { return true; }
01819
01820 inline unsigned int atomic_load_explicit
01821 ( volatile atomic_uint* __a__, memory_order __x__ )
01822 { return _ATOMIC_LOAD_( __a__, __x__ ); }
01823
01824 inline unsigned int atomic_load( volatile atomic_uint* __a__ )
01825 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
01826
01827 inline void atomic_store_explicit
01828 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
01829 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
01830
01831 inline void atomic_store
01832 ( volatile atomic_uint* __a__, unsigned int __m__ )
01833 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
01834
01835 inline unsigned int atomic_swap_explicit
01836 ( volatile atomic_uint* __a__, unsigned int __m__, memory_order __x__ )
01837 { return _ATOMIC_MODIFY_( __a__, ops_swap, __m__, __x__ ); }
01838
01839 inline unsigned int atomic_swap
01840 ( volatile atomic_uint* __a__, unsigned int __m__ )
01841 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
01842
01843 inline bool atomic_compare_swap_explicit
01844 ( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__,
01845 memory_order __x__, memory_order __y__ )
01846 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
01847
01848 inline bool atomic_compare_swap
01849 ( volatile atomic_uint* __a__, unsigned int* __e__, unsigned int __m__ )
01850 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
01851 memory_order_seq_cst, memory_order_seq_cst ); }
01852
01853 inline void atomic_fence
01854 ( const volatile atomic_uint* __a__, memory_order __x__ )
01855 { _ATOMIC_FENCE_( __a__, __x__ ); }
01856
01857
01858 inline bool atomic_is_lock_free( const volatile atomic_long* __a__ )
01859 { return true; }
01860
01861 inline long atomic_load_explicit
01862 ( volatile atomic_long* __a__, memory_order __x__ )
01863 { return _ATOMIC_LOAD_( __a__, __x__ ); }
01864
01865 inline long atomic_load( volatile atomic_long* __a__ )
01866 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
01867
01868 inline void atomic_store_explicit
01869 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
01870 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
01871
01872 inline void atomic_store
01873 ( volatile atomic_long* __a__, long __m__ )
01874 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
01875
01876 inline long atomic_swap_explicit
01877 ( volatile atomic_long* __a__, long __m__, memory_order __x__ )
01878 { return _ATOMIC_MODIFY_( __a__, ops_swap, __m__, __x__ ); }
01879
01880 inline long atomic_swap
01881 ( volatile atomic_long* __a__, long __m__ )
01882 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
01883
01884 inline bool atomic_compare_swap_explicit
01885 ( volatile atomic_long* __a__, long* __e__, long __m__,
01886 memory_order __x__, memory_order __y__ )
01887 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
01888
01889 inline bool atomic_compare_swap
01890 ( volatile atomic_long* __a__, long* __e__, long __m__ )
01891 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
01892 memory_order_seq_cst, memory_order_seq_cst ); }
01893
01894 inline void atomic_fence
01895 ( const volatile atomic_long* __a__, memory_order __x__ )
01896 { _ATOMIC_FENCE_( __a__, __x__ ); }
01897
01898
01899 inline bool atomic_is_lock_free( const volatile atomic_ulong* __a__ )
01900 { return true; }
01901
01902 inline unsigned long atomic_load_explicit
01903 ( volatile atomic_ulong* __a__, memory_order __x__ )
01904 { return _ATOMIC_LOAD_( __a__, __x__ ); }
01905
01906 inline unsigned long atomic_load( volatile atomic_ulong* __a__ )
01907 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
01908
01909 inline void atomic_store_explicit
01910 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
01911 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
01912
01913 inline void atomic_store
01914 ( volatile atomic_ulong* __a__, unsigned long __m__ )
01915 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
01916
01917 inline unsigned long atomic_swap_explicit
01918 ( volatile atomic_ulong* __a__, unsigned long __m__, memory_order __x__ )
01919 { return _ATOMIC_MODIFY_( __a__, ops_swap, __m__, __x__ ); }
01920
01921 inline unsigned long atomic_swap
01922 ( volatile atomic_ulong* __a__, unsigned long __m__ )
01923 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
01924
01925 inline bool atomic_compare_swap_explicit
01926 ( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__,
01927 memory_order __x__, memory_order __y__ )
01928 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
01929
01930 inline bool atomic_compare_swap
01931 ( volatile atomic_ulong* __a__, unsigned long* __e__, unsigned long __m__ )
01932 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
01933 memory_order_seq_cst, memory_order_seq_cst ); }
01934
01935 inline void atomic_fence
01936 ( const volatile atomic_ulong* __a__, memory_order __x__ )
01937 { _ATOMIC_FENCE_( __a__, __x__ ); }
01938
01939
01940 inline bool atomic_is_lock_free( const volatile atomic_llong* __a__ )
01941 { return true; }
01942
01943 inline long long atomic_load_explicit
01944 ( volatile atomic_llong* __a__, memory_order __x__ )
01945 { return _ATOMIC_LOAD_( __a__, __x__ ); }
01946
01947 inline long long atomic_load( volatile atomic_llong* __a__ )
01948 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
01949
01950 inline void atomic_store_explicit
01951 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
01952 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
01953
01954 inline void atomic_store
01955 ( volatile atomic_llong* __a__, long long __m__ )
01956 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
01957
01958 inline long long atomic_swap_explicit
01959 ( volatile atomic_llong* __a__, long long __m__, memory_order __x__ )
01960 { return _ATOMIC_MODIFY_( __a__, ops_swap, __m__, __x__ ); }
01961
01962 inline long long atomic_swap
01963 ( volatile atomic_llong* __a__, long long __m__ )
01964 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
01965
01966 inline bool atomic_compare_swap_explicit
01967 ( volatile atomic_llong* __a__, long long* __e__, long long __m__,
01968 memory_order __x__, memory_order __y__ )
01969 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
01970
01971 inline bool atomic_compare_swap
01972 ( volatile atomic_llong* __a__, long long* __e__, long long __m__ )
01973 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
01974 memory_order_seq_cst, memory_order_seq_cst ); }
01975
01976 inline void atomic_fence
01977 ( const volatile atomic_llong* __a__, memory_order __x__ )
01978 { _ATOMIC_FENCE_( __a__, __x__ ); }
01979
01980
01981 inline bool atomic_is_lock_free( const volatile atomic_ullong* __a__ )
01982 { return true; }
01983
01984 inline unsigned long long atomic_load_explicit
01985 ( volatile atomic_ullong* __a__, memory_order __x__ )
01986 { return _ATOMIC_LOAD_( __a__, __x__ ); }
01987
01988 inline unsigned long long atomic_load( volatile atomic_ullong* __a__ )
01989 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
01990
01991 inline void atomic_store_explicit
01992 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
01993 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
01994
01995 inline void atomic_store
01996 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
01997 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
01998
01999 inline unsigned long long atomic_swap_explicit
02000 ( volatile atomic_ullong* __a__, unsigned long long __m__, memory_order __x__ )
02001 { return _ATOMIC_MODIFY_( __a__, ops_swap, __m__, __x__ ); }
02002
02003 inline unsigned long long atomic_swap
02004 ( volatile atomic_ullong* __a__, unsigned long long __m__ )
02005 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
02006
02007 inline bool atomic_compare_swap_explicit
02008 ( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__,
02009 memory_order __x__, memory_order __y__ )
02010 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
02011
02012 inline bool atomic_compare_swap
02013 ( volatile atomic_ullong* __a__, unsigned long long* __e__, unsigned long long __m__ )
02014 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
02015 memory_order_seq_cst, memory_order_seq_cst ); }
02016
02017 inline void atomic_fence
02018 ( const volatile atomic_ullong* __a__, memory_order __x__ )
02019 { _ATOMIC_FENCE_( __a__, __x__ ); }
02020
02021
02022 inline bool atomic_is_lock_free( const volatile atomic_wchar_t* __a__ )
02023 { return true; }
02024
02025 inline wchar_t atomic_load_explicit
02026 ( volatile atomic_wchar_t* __a__, memory_order __x__ )
02027 { return _ATOMIC_LOAD_( __a__, __x__ ); }
02028
02029 inline wchar_t atomic_load( volatile atomic_wchar_t* __a__ )
02030 { return atomic_load_explicit( __a__, memory_order_seq_cst ); }
02031
02032 inline void atomic_store_explicit
02033 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
02034 { _ATOMIC_STORE_( __a__, __m__, __x__ ); }
02035
02036 inline void atomic_store
02037 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
02038 { atomic_store_explicit( __a__, __m__, memory_order_seq_cst ); }
02039
02040 inline wchar_t atomic_swap_explicit
02041 ( volatile atomic_wchar_t* __a__, wchar_t __m__, memory_order __x__ )
02042 { return _ATOMIC_MODIFY_( __a__, ops_swap, __m__, __x__ ); }
02043
02044 inline wchar_t atomic_swap
02045 ( volatile atomic_wchar_t* __a__, wchar_t __m__ )
02046 { return atomic_swap_explicit( __a__, __m__, memory_order_seq_cst ); }
02047
02048 inline bool atomic_compare_swap_explicit
02049 ( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__,
02050 memory_order __x__, memory_order __y__ )
02051 { return _ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ ); }
02052
02053 inline bool atomic_compare_swap
02054 ( volatile atomic_wchar_t* __a__, wchar_t* __e__, wchar_t __m__ )
02055 { return atomic_compare_swap_explicit( __a__, __e__, __m__,
02056 memory_order_seq_cst, memory_order_seq_cst ); }
02057
02058 inline void atomic_fence
02059 ( const volatile atomic_wchar_t* __a__, memory_order __x__ )
02060 { _ATOMIC_FENCE_( __a__, __x__ ); }
02061
02062
02063 #else
02064
/* Fallback branch (the controlling #if is earlier in the file): the
 * same operations as the typed inline functions above, provided as
 * type-generic macros that expand the backend _ATOMIC_* primitives
 * directly.  Plain forms are seq_cst; *_explicit forms take a
 * memory_order argument. */

/* Always reports lock-free, matching the inline functions above. */
#define atomic_is_lock_free( __a__ ) \
true

#define atomic_load( __a__ ) \
_ATOMIC_LOAD_( __a__, memory_order_seq_cst )

#define atomic_load_explicit( __a__, __x__ ) \
_ATOMIC_LOAD_( __a__, __x__ )

#define atomic_store( __a__, __m__ ) \
_ATOMIC_STORE_( __a__, __m__, memory_order_seq_cst )

#define atomic_store_explicit( __a__, __m__, __x__ ) \
_ATOMIC_STORE_( __a__, __m__, __x__ )

#define atomic_swap( __a__, __m__ ) \
_ATOMIC_MODIFY_( __a__, ops_swap, __m__, memory_order_seq_cst )

#define atomic_swap_explicit( __a__, __m__, __x__ ) \
_ATOMIC_MODIFY_( __a__, ops_swap, __m__, __x__ )

#define atomic_compare_swap( __a__, __e__, __m__ ) \
_ATOMIC_CMPSWP_( __a__, __e__, __m__, memory_order_seq_cst )

/* NOTE: the failure order __y__ is accepted but ignored;
 * _ATOMIC_CMPSWP_ honours only the success order __x__. */
#define atomic_compare_swap_explicit( __a__, __e__, __m__, __x__, __y__ ) \
_ATOMIC_CMPSWP_( __a__, __e__, __m__, __x__ )

/* Statement-expression wrapper (GNU extension) so the macro can be
 * used where an expression is expected. */
#define atomic_fence( __a__, __x__ ) \
({ _ATOMIC_FENCE_( __a__, __x__ ); })
02094
02095
02096 #endif
02097
02098
02099 #if defined(GCC) && defined(X86)
02100 #include <amino/arch/x86/gcc_builtins.h>
02101 #endif
02102
02103 #ifdef __cplusplus
02104
02105
/* atomic_bool member functions: each forwards to the corresponding
 * free atomic_* function on `this`. */

inline bool atomic_bool::is_lock_free() const volatile
{ return true; }


inline void atomic_bool::store
( bool __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline bool atomic_bool::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline bool atomic_bool::swap
( bool __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

/* NOTE(review): compiler_barrier() is issued before the CAS here, but
 * the equivalent two-order compare_swap of some other types (e.g.
 * atomic_address) omits it — confirm whether intentional. */
inline bool atomic_bool::compare_swap
( bool& __e__, bool __m__,
memory_order __x__, memory_order __y__ ) volatile
{
compiler_barrier();
return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

/* Single-order CAS: the failure ordering is derived from the success
 * ordering (acq_rel -> acquire, release -> relaxed, else unchanged). */
inline bool atomic_bool::compare_swap
( bool& __e__, bool __m__, memory_order __x__ ) volatile
{
compiler_barrier();
return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_bool::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
02140
02141
/* atomic_address member functions: each forwards to the corresponding
 * free atomic_* function on `this`. */

inline bool atomic_address::is_lock_free() const volatile
{ return true; }


inline void atomic_address::store
( void* __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

/* Relaxed loads read the underlying field directly, bypassing the
 * backend load primitive; all other orderings go through it. */
inline void* atomic_address::load( memory_order __x__ ) const volatile
{
if(__x__ == memory_order_relaxed)
return __f__;
return atomic_load_explicit( this, __x__ );
}

inline void* atomic_address::swap
( void* __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_address::compare_swap
( void*& __e__, void* __m__,
memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

/* Single-order CAS: the failure ordering is derived from the success
 * ordering (acq_rel -> acquire, release -> relaxed, else unchanged). */
inline bool atomic_address::compare_swap
( void*& __e__, void* __m__, memory_order __x__ ) volatile
{
compiler_barrier();
return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_address::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
02177
02178
/* atomic_char member functions: each forwards to the corresponding
 * free atomic_* function on `this`. */

inline bool atomic_char::is_lock_free() const volatile
{ return true; }

inline void atomic_char::store
( char __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline char atomic_char::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline char atomic_char::swap
( char __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_char::compare_swap
( char& __e__, char __m__,
memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

/* Single-order CAS: the failure ordering is derived from the success
 * ordering (acq_rel -> acquire, release -> relaxed, else unchanged). */
inline bool atomic_char::compare_swap
( char& __e__, char __m__, memory_order __x__ ) volatile
{
compiler_barrier();
return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_char::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
02210
02211
/* atomic_schar member functions: each forwards to the corresponding
 * free atomic_* function on `this`. */

inline bool atomic_schar::is_lock_free() const volatile
{ return true; }

inline void atomic_schar::store
( signed char __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline signed char atomic_schar::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline signed char atomic_schar::swap
( signed char __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_schar::compare_swap
( signed char& __e__, signed char __m__,
memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

/* Single-order CAS: the failure ordering is derived from the success
 * ordering (acq_rel -> acquire, release -> relaxed, else unchanged). */
inline bool atomic_schar::compare_swap
( signed char& __e__, signed char __m__, memory_order __x__ ) volatile
{
compiler_barrier();
return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_schar::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
02243
02244
/* atomic_uchar member functions: each forwards to the corresponding
 * free atomic_* function on `this`. */

inline bool atomic_uchar::is_lock_free() const volatile
{ return true; }

inline void atomic_uchar::store
( unsigned char __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline unsigned char atomic_uchar::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline unsigned char atomic_uchar::swap
( unsigned char __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_uchar::compare_swap
( unsigned char& __e__, unsigned char __m__,
memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

/* Single-order CAS: the failure ordering is derived from the success
 * ordering (acq_rel -> acquire, release -> relaxed, else unchanged). */
inline bool atomic_uchar::compare_swap
( unsigned char& __e__, unsigned char __m__, memory_order __x__ ) volatile
{
compiler_barrier();
return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_uchar::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
02276
02277
/* atomic_short member functions: each forwards to the corresponding
 * free atomic_* function on `this`. */

inline bool atomic_short::is_lock_free() const volatile
{ return true; }

inline void atomic_short::store
( short __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline short atomic_short::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline short atomic_short::swap
( short __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_short::compare_swap
( short& __e__, short __m__,
memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

/* Single-order CAS: the failure ordering is derived from the success
 * ordering (acq_rel -> acquire, release -> relaxed, else unchanged). */
inline bool atomic_short::compare_swap
( short& __e__, short __m__, memory_order __x__ ) volatile
{
compiler_barrier();
return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_short::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
02309
02310
/* atomic_ushort member functions: each forwards to the corresponding
 * free atomic_* function on `this`. */

inline bool atomic_ushort::is_lock_free() const volatile
{ return true; }

inline void atomic_ushort::store
( unsigned short __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline unsigned short atomic_ushort::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline unsigned short atomic_ushort::swap
( unsigned short __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_ushort::compare_swap
( unsigned short& __e__, unsigned short __m__,
memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

/* Single-order CAS: the failure ordering is derived from the success
 * ordering (acq_rel -> acquire, release -> relaxed, else unchanged). */
inline bool atomic_ushort::compare_swap
( unsigned short& __e__, unsigned short __m__, memory_order __x__ ) volatile
{
compiler_barrier();
return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_ushort::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
02342
02343
/* atomic_int member functions: each forwards to the corresponding
 * free atomic_* function on `this`. */

inline bool atomic_int::is_lock_free() const volatile
{ return true; }

inline void atomic_int::store
( int __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

/* Relaxed loads read the underlying field directly, bypassing the
 * backend load primitive; all other orderings go through it. */
inline int atomic_int::load( memory_order __x__ ) volatile
{
if(__x__ == memory_order_relaxed)
return __f__;
return atomic_load_explicit( this, __x__ );
}

inline int atomic_int::swap
( int __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_int::compare_swap
( int& __e__, int __m__,
memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

/* Single-order CAS: the failure ordering is derived from the success
 * ordering (acq_rel -> acquire, release -> relaxed, else unchanged). */
inline bool atomic_int::compare_swap
( int& __e__, int __m__, memory_order __x__ ) volatile
{
compiler_barrier();
return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_int::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
02378
/* atomic_uint member functions: each forwards to the corresponding
 * free atomic_* function on `this`. */

inline bool atomic_uint::is_lock_free() const volatile
{ return true; }

inline void atomic_uint::store
( unsigned int __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline unsigned int atomic_uint::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline unsigned int atomic_uint::swap
( unsigned int __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_uint::compare_swap
( unsigned int& __e__, unsigned int __m__,
memory_order __x__, memory_order __y__ ) volatile
{
return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

/* Single-order CAS: the failure ordering is derived from the success
 * ordering (acq_rel -> acquire, release -> relaxed, else unchanged). */
inline bool atomic_uint::compare_swap
( unsigned int& __e__, unsigned int __m__, memory_order __x__ ) volatile
{
compiler_barrier();
return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_uint::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
02411
/* atomic_long member functions: each forwards to the corresponding
 * free atomic_* function on `this`. */

inline bool atomic_long::is_lock_free() const volatile
{ return true; }

inline void atomic_long::store
( long __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline long atomic_long::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline long atomic_long::swap
( long __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_long::compare_swap
( long& __e__, long __m__,
memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

/* Single-order CAS: the failure ordering is derived from the success
 * ordering (acq_rel -> acquire, release -> relaxed, else unchanged). */
inline bool atomic_long::compare_swap
( long& __e__, long __m__, memory_order __x__ ) volatile
{
compiler_barrier();
return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_long::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
02443
02444
/* atomic_ulong member functions: each forwards to the corresponding
 * free atomic_* function on `this`. */

inline bool atomic_ulong::is_lock_free() const volatile
{ return true; }

inline void atomic_ulong::store
( unsigned long __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline unsigned long atomic_ulong::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline unsigned long atomic_ulong::swap
( unsigned long __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_ulong::compare_swap
( unsigned long& __e__, unsigned long __m__,
memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

/* Single-order CAS: the failure ordering is derived from the success
 * ordering (acq_rel -> acquire, release -> relaxed, else unchanged). */
inline bool atomic_ulong::compare_swap
( unsigned long& __e__, unsigned long __m__, memory_order __x__ ) volatile
{
compiler_barrier();
return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_ulong::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
02476
02477
/* atomic_llong member functions: each forwards to the corresponding
 * free atomic_* function on `this`. */

inline bool atomic_llong::is_lock_free() const volatile
{ return true; }

inline void atomic_llong::store
( long long __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline long long atomic_llong::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline long long atomic_llong::swap
( long long __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

inline bool atomic_llong::compare_swap
( long long& __e__, long long __m__,
memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

/* Single-order CAS: the failure ordering is derived from the success
 * ordering (acq_rel -> acquire, release -> relaxed, else unchanged). */
inline bool atomic_llong::compare_swap
( long long& __e__, long long __m__, memory_order __x__ ) volatile
{
compiler_barrier();
return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_llong::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
02509
02510
// --- atomic_ullong member definitions: thin forwards to the C-style
// --- *_explicit operations supplied by the atomic_base headers.

inline bool atomic_ullong::is_lock_free() const volatile
{ return true; }   // hard-coded for this build configuration

inline void atomic_ullong::store
( unsigned long long __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline unsigned long long atomic_ullong::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline unsigned long long atomic_ullong::swap
( unsigned long long __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

// CAS with separate success (__x__) / failure (__y__) orderings.
inline bool atomic_ullong::compare_swap
( unsigned long long& __e__, unsigned long long __m__,
memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

// Single-order CAS: failure ordering derived from __x__
// (acq_rel -> acquire, release -> relaxed, otherwise __x__ itself).
inline bool atomic_ullong::compare_swap
( unsigned long long& __e__, unsigned long long __m__, memory_order __x__ ) volatile
{
compiler_barrier();   // project helper; presumably inhibits compiler reordering — confirm
return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_ullong::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
02542
02543
// --- atomic_wchar_t member definitions: thin forwards to the C-style
// --- *_explicit operations supplied by the atomic_base headers.

inline bool atomic_wchar_t::is_lock_free() const volatile
{ return true; }   // hard-coded for this build configuration

inline void atomic_wchar_t::store
( wchar_t __m__, memory_order __x__ ) volatile
{ atomic_store_explicit( this, __m__, __x__ ); }

inline wchar_t atomic_wchar_t::load
( memory_order __x__ ) volatile
{ return atomic_load_explicit( this, __x__ ); }

inline wchar_t atomic_wchar_t::swap
( wchar_t __m__, memory_order __x__ ) volatile
{ return atomic_swap_explicit( this, __m__, __x__ ); }

// CAS with separate success (__x__) / failure (__y__) orderings.
inline bool atomic_wchar_t::compare_swap
( wchar_t& __e__, wchar_t __m__,
memory_order __x__, memory_order __y__ ) volatile
{ return atomic_compare_swap_explicit( this, &__e__, __m__, __x__, __y__ ); }

// Single-order CAS: failure ordering derived from __x__
// (acq_rel -> acquire, release -> relaxed, otherwise __x__ itself).
inline bool atomic_wchar_t::compare_swap
( wchar_t& __e__, wchar_t __m__, memory_order __x__ ) volatile
{
compiler_barrier();   // project helper; presumably inhibits compiler reordering — confirm
return atomic_compare_swap_explicit( this, &__e__, __m__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }

inline void atomic_wchar_t::fence
( memory_order __x__ ) const volatile
{ return atomic_fence( this, __x__ ); }
02575
02576
// --- Generic atomic<T> template: implemented directly on the _ATOMIC_*
// --- macros provided by the selected atomic_base header.

template< typename T >
inline bool atomic<T>::is_lock_free() const volatile
{ return true; }   // hard-coded for this build configuration

template< typename T >
inline void atomic<T>::store( T __v__, memory_order __x__ ) volatile
{ _ATOMIC_STORE_( this, __v__, __x__ ); }

// Load with a fast path: memory_order_relaxed imposes no ordering, so the
// underlying field is read directly instead of going through _ATOMIC_LOAD_.
template< typename T >
inline T atomic<T>::load( memory_order __x__ ) volatile
{
if(__x__ == memory_order_relaxed)
return __f__;
return _ATOMIC_LOAD_( this, __x__ );
}

template< typename T >
inline T atomic<T>::swap( T __v__, memory_order __x__ ) volatile
{ return _ATOMIC_MODIFY_( this, ops_swap, __v__, __x__ ); }

// NOTE(review): the failure ordering __y__ is accepted but not forwarded —
// _ATOMIC_CMPSWP_ is invoked with the success ordering only.  Confirm this
// loss of the failure ordering is intended by the macro's contract.
template< typename T >
inline bool atomic<T>::compare_swap
( T& __r__, T __v__, memory_order __x__, memory_order __y__ ) volatile
{ return _ATOMIC_CMPSWP_( this, &__r__, __v__, __x__ ); }

// Single-order CAS: failure ordering derived from __x__
// (acq_rel -> acquire, release -> relaxed, otherwise __x__ itself).
template< typename T >
inline bool atomic<T>::compare_swap
( T& __r__, T __v__, memory_order __x__ ) volatile
{
compiler_barrier();   // project helper; presumably inhibits compiler reordering — confirm
return compare_swap( __r__, __v__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ ); }
02610
02611
02612 inline void* atomic_address::fetch_add
02613 ( ptrdiff_t __m__, memory_order __x__ ) volatile
02614 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
02615
02616 inline void* atomic_address::fetch_sub
02617 ( ptrdiff_t __m__, memory_order __x__ ) volatile
02618 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
02619
02620
02621 inline char atomic_char::fetch_add
02622 ( char __m__, memory_order __x__ ) volatile
02623 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
02624
02625
02626 inline char atomic_char::fetch_sub
02627 ( char __m__, memory_order __x__ ) volatile
02628 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
02629
02630
02631 inline char atomic_char::fetch_and
02632 ( char __m__, memory_order __x__ ) volatile
02633 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
02634
02635
02636 inline char atomic_char::fetch_or
02637 ( char __m__, memory_order __x__ ) volatile
02638 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
02639
02640
02641 inline char atomic_char::fetch_xor
02642 ( char __m__, memory_order __x__ ) volatile
02643 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
02644
02645
02646 inline signed char atomic_schar::fetch_add
02647 ( signed char __m__, memory_order __x__ ) volatile
02648 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
02649
02650
02651 inline signed char atomic_schar::fetch_sub
02652 ( signed char __m__, memory_order __x__ ) volatile
02653 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
02654
02655
02656 inline signed char atomic_schar::fetch_and
02657 ( signed char __m__, memory_order __x__ ) volatile
02658 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
02659
02660
02661 inline signed char atomic_schar::fetch_or
02662 ( signed char __m__, memory_order __x__ ) volatile
02663 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
02664
02665
02666 inline signed char atomic_schar::fetch_xor
02667 ( signed char __m__, memory_order __x__ ) volatile
02668 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
02669
02670
02671 inline unsigned char atomic_uchar::fetch_add
02672 ( unsigned char __m__, memory_order __x__ ) volatile
02673 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
02674
02675
02676 inline unsigned char atomic_uchar::fetch_sub
02677 ( unsigned char __m__, memory_order __x__ ) volatile
02678 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
02679
02680
02681 inline unsigned char atomic_uchar::fetch_and
02682 ( unsigned char __m__, memory_order __x__ ) volatile
02683 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
02684
02685
02686 inline unsigned char atomic_uchar::fetch_or
02687 ( unsigned char __m__, memory_order __x__ ) volatile
02688 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
02689
02690
02691 inline unsigned char atomic_uchar::fetch_xor
02692 ( unsigned char __m__, memory_order __x__ ) volatile
02693 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
02694
02695
02696 inline short atomic_short::fetch_add
02697 ( short __m__, memory_order __x__ ) volatile
02698 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
02699
02700
02701 inline short atomic_short::fetch_sub
02702 ( short __m__, memory_order __x__ ) volatile
02703 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
02704
02705
02706 inline short atomic_short::fetch_and
02707 ( short __m__, memory_order __x__ ) volatile
02708 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
02709
02710
02711 inline short atomic_short::fetch_or
02712 ( short __m__, memory_order __x__ ) volatile
02713 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
02714
02715
02716 inline short atomic_short::fetch_xor
02717 ( short __m__, memory_order __x__ ) volatile
02718 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
02719
02720
02721 inline unsigned short atomic_ushort::fetch_add
02722 ( unsigned short __m__, memory_order __x__ ) volatile
02723 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
02724
02725
02726 inline unsigned short atomic_ushort::fetch_sub
02727 ( unsigned short __m__, memory_order __x__ ) volatile
02728 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
02729
02730
02731 inline unsigned short atomic_ushort::fetch_and
02732 ( unsigned short __m__, memory_order __x__ ) volatile
02733 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
02734
02735
02736 inline unsigned short atomic_ushort::fetch_or
02737 ( unsigned short __m__, memory_order __x__ ) volatile
02738 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
02739
02740
02741 inline unsigned short atomic_ushort::fetch_xor
02742 ( unsigned short __m__, memory_order __x__ ) volatile
02743 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
02744
02745
02746 inline int atomic_int::fetch_add
02747 ( int __m__, memory_order __x__ ) volatile
02748 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
02749
02750
02751 inline int atomic_int::fetch_sub
02752 ( int __m__, memory_order __x__ ) volatile
02753 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
02754
02755
02756 inline int atomic_int::fetch_and
02757 ( int __m__, memory_order __x__ ) volatile
02758 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
02759
02760
02761 inline int atomic_int::fetch_or
02762 ( int __m__, memory_order __x__ ) volatile
02763 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
02764
02765
02766 inline int atomic_int::fetch_xor
02767 ( int __m__, memory_order __x__ ) volatile
02768 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
02769
02770
02771 inline unsigned int atomic_uint::fetch_add
02772 ( unsigned int __m__, memory_order __x__ ) volatile
02773 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
02774
02775
02776 inline unsigned int atomic_uint::fetch_sub
02777 ( unsigned int __m__, memory_order __x__ ) volatile
02778 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
02779
02780
02781 inline unsigned int atomic_uint::fetch_and
02782 ( unsigned int __m__, memory_order __x__ ) volatile
02783 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
02784
02785
02786 inline unsigned int atomic_uint::fetch_or
02787 ( unsigned int __m__, memory_order __x__ ) volatile
02788 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
02789
02790
02791 inline unsigned int atomic_uint::fetch_xor
02792 ( unsigned int __m__, memory_order __x__ ) volatile
02793 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
02794
02795
02796 inline long atomic_long::fetch_add
02797 ( long __m__, memory_order __x__ ) volatile
02798 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
02799
02800
02801 inline long atomic_long::fetch_sub
02802 ( long __m__, memory_order __x__ ) volatile
02803 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
02804
02805
02806 inline long atomic_long::fetch_and
02807 ( long __m__, memory_order __x__ ) volatile
02808 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
02809
02810
02811 inline long atomic_long::fetch_or
02812 ( long __m__, memory_order __x__ ) volatile
02813 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
02814
02815
02816 inline long atomic_long::fetch_xor
02817 ( long __m__, memory_order __x__ ) volatile
02818 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
02819
02820
02821 inline unsigned long atomic_ulong::fetch_add
02822 ( unsigned long __m__, memory_order __x__ ) volatile
02823 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
02824
02825
02826 inline unsigned long atomic_ulong::fetch_sub
02827 ( unsigned long __m__, memory_order __x__ ) volatile
02828 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
02829
02830
02831 inline unsigned long atomic_ulong::fetch_and
02832 ( unsigned long __m__, memory_order __x__ ) volatile
02833 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
02834
02835
02836 inline unsigned long atomic_ulong::fetch_or
02837 ( unsigned long __m__, memory_order __x__ ) volatile
02838 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
02839
02840
02841 inline unsigned long atomic_ulong::fetch_xor
02842 ( unsigned long __m__, memory_order __x__ ) volatile
02843 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
02844
02845
02846 inline long long atomic_llong::fetch_add
02847 ( long long __m__, memory_order __x__ ) volatile
02848 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
02849
02850
02851 inline long long atomic_llong::fetch_sub
02852 ( long long __m__, memory_order __x__ ) volatile
02853 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
02854
02855
02856 inline long long atomic_llong::fetch_and
02857 ( long long __m__, memory_order __x__ ) volatile
02858 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
02859
02860
02861 inline long long atomic_llong::fetch_or
02862 ( long long __m__, memory_order __x__ ) volatile
02863 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
02864
02865
02866 inline long long atomic_llong::fetch_xor
02867 ( long long __m__, memory_order __x__ ) volatile
02868 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
02869
02870
02871 inline unsigned long long atomic_ullong::fetch_add
02872 ( unsigned long long __m__, memory_order __x__ ) volatile
02873 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
02874
02875
02876 inline unsigned long long atomic_ullong::fetch_sub
02877 ( unsigned long long __m__, memory_order __x__ ) volatile
02878 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
02879
02880
02881 inline unsigned long long atomic_ullong::fetch_and
02882 ( unsigned long long __m__, memory_order __x__ ) volatile
02883 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
02884
02885
02886 inline unsigned long long atomic_ullong::fetch_or
02887 ( unsigned long long __m__, memory_order __x__ ) volatile
02888 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
02889
02890
02891 inline unsigned long long atomic_ullong::fetch_xor
02892 ( unsigned long long __m__, memory_order __x__ ) volatile
02893 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
02894
02895
02896 inline wchar_t atomic_wchar_t::fetch_add
02897 ( wchar_t __m__, memory_order __x__ ) volatile
02898 { return atomic_fetch_add_explicit( this, __m__, __x__ ); }
02899
02900
02901 inline wchar_t atomic_wchar_t::fetch_sub
02902 ( wchar_t __m__, memory_order __x__ ) volatile
02903 { return atomic_fetch_sub_explicit( this, __m__, __x__ ); }
02904
02905
02906 inline wchar_t atomic_wchar_t::fetch_and
02907 ( wchar_t __m__, memory_order __x__ ) volatile
02908 { return atomic_fetch_and_explicit( this, __m__, __x__ ); }
02909
02910
02911 inline wchar_t atomic_wchar_t::fetch_or
02912 ( wchar_t __m__, memory_order __x__ ) volatile
02913 { return atomic_fetch_or_explicit( this, __m__, __x__ ); }
02914
02915
02916 inline wchar_t atomic_wchar_t::fetch_xor
02917 ( wchar_t __m__, memory_order __x__ ) volatile
02918 { return atomic_fetch_xor_explicit( this, __m__, __x__ ); }
02919
02920
// --- atomic<T*> partial specialization: layered on atomic_address, with
// --- pointer arithmetic scaled by sizeof(T) (offsets are element counts).

// NOTE(review): load() is const volatile here, unlike the integral
// specializations' volatile-only load(); relies on atomic_address::load
// being callable on a const volatile object — confirm its declaration.
template< typename T >
T* atomic<T*>::load( memory_order __x__ ) const volatile
{ return static_cast<T*>( atomic_address::load( __x__ ) ); }

template< typename T >
T* atomic<T*>::swap( T* __v__, memory_order __x__ ) volatile
{ return static_cast<T*>( atomic_address::swap( __v__, __x__ ) ); }

// The T*& expected slot is reinterpreted as a void*& so atomic_address's
// CAS can write the observed pointer back through it.  NOTE(review):
// assumes T* and void* share object representation — true on the supported
// targets, but confirm when porting.
template< typename T >
bool atomic<T*>::compare_swap
( T*& __r__, T* __v__, memory_order __x__, memory_order __y__) volatile
{ return atomic_address::compare_swap( *reinterpret_cast<void**>( &__r__ ),
static_cast<void*>( __v__ ), __x__, __y__ ); }

// Single-order CAS: failure ordering derived from __x__
// (acq_rel -> acquire, release -> relaxed, otherwise __x__ itself).
template< typename T >
bool atomic<T*>::compare_swap
( T*& __r__, T* __v__, memory_order __x__ ) volatile
{
compiler_barrier();   // project helper; presumably inhibits compiler reordering — confirm
return compare_swap( __r__, __v__, __x__,
__x__ == memory_order_acq_rel ? memory_order_acquire :
__x__ == memory_order_release ? memory_order_relaxed : __x__ );
}

// fetch_add/fetch_sub scale the element count __v__ by sizeof(T) so the
// byte-addressed helper sees a byte offset, matching ordinary pointer
// arithmetic semantics.
template< typename T >
T* atomic<T*>::fetch_add( ptrdiff_t __v__, memory_order __x__ ) volatile
{ return atomic_fetch_add_explicit( this, sizeof(T) * __v__, __x__ ); }

template< typename T >
T* atomic<T*>::fetch_sub( ptrdiff_t __v__, memory_order __x__ ) volatile
{ return atomic_fetch_sub_explicit( this, sizeof(T) * __v__, __x__ ); }
02952
02953
02954 #endif
02955
02956
02957 #ifdef __cplusplus
02958 }
02959 #endif
02960
02961 #endif