// -*- C++ -*- header.

/** @file bits/atomic_0.h
 *  This is an internal header file, included by other library headers.
 *  You should not attempt to use it directly.
 */

#ifndef _GLIBCXX_ATOMIC_0_H
#define _GLIBCXX_ATOMIC_0_H 1

#pragma GCC system_header

namespace __atomic0
{
  struct atomic_flag;
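
  // Implementation-specific defines.  Note that _ATOMIC_MEMBER_ is not
  // defined in this header; it is assumed to be supplied by the including
  // header and to expand to a parenthesized access to the object's _M_i
  // member, so that `__typeof__ _ATOMIC_MEMBER_` below is a valid GNU
  // typeof expression.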
#define _ATOMIC_LOAD_(__a, __x)                                         \
  ({ volatile __typeof__ _ATOMIC_MEMBER_* __p = &_ATOMIC_MEMBER_;       \
     volatile __atomic_flag_base* __g = __atomic_flag_for_address(__p); \
     __atomic_flag_wait_explicit(__g, __x);                             \
     __typeof__ _ATOMIC_MEMBER_ __r = *__p;                             \
     atomic_flag_clear_explicit(__g, __x);                              \
     __r; })

#define _ATOMIC_STORE_(__a, __m, __x)                                   \
  ({ volatile __typeof__ _ATOMIC_MEMBER_* __p = &_ATOMIC_MEMBER_;       \
     __typeof__(__m) __v = (__m);                                       \
     volatile __atomic_flag_base* __g = __atomic_flag_for_address(__p); \
     __atomic_flag_wait_explicit(__g, __x);                             \
     *__p = __v;                                                        \
     atomic_flag_clear_explicit(__g, __x);                              \
     __v; })

#define _ATOMIC_MODIFY_(__a, __o, __m, __x)                             \
  ({ volatile __typeof__ _ATOMIC_MEMBER_* __p = &_ATOMIC_MEMBER_;       \
     __typeof__(__m) __v = (__m);                                       \
     volatile __atomic_flag_base* __g = __atomic_flag_for_address(__p); \
     __atomic_flag_wait_explicit(__g, __x);                             \
     __typeof__ _ATOMIC_MEMBER_ __r = *__p;                             \
     *__p __o __v;                                                      \
     atomic_flag_clear_explicit(__g, __x);                              \
     __r; })

#define _ATOMIC_CMPEXCHNG_(__a, __e, __m, __x)                          \
  ({ volatile __typeof__ _ATOMIC_MEMBER_* __p = &_ATOMIC_MEMBER_;       \
     __typeof__(__e) __q = (__e);                                       \
     __typeof__(__m) __v = (__m);                                       \
     bool __r;                                                          \
     volatile __atomic_flag_base* __g = __atomic_flag_for_address(__p); \
     __atomic_flag_wait_explicit(__g, __x);                             \
     __typeof__ _ATOMIC_MEMBER_ __t__ = *__p;                           \
     if (__t__ == *__q) { *__p = __v; __r = true; }                     \
     else { *__q = __t__; __r = false; }                                \
     atomic_flag_clear_explicit(__g, __x);                              \
     __r; })
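
  // Each macro above follows the same protocol: map the object's address
  // to one of a fixed set of flag-based locks, spin until that lock is
  // acquired, perform the operation non-atomically, then release the lock.
  // A minimal sketch of the equivalent code for a load (__locked_load is a
  // hypothetical name used only for illustration, not part of this header):
  //
  //   int
  //   __locked_load(volatile int* __p, memory_order __m)
  //   {
  //     volatile __atomic_flag_base* __g = __atomic_flag_for_address(__p);
  //     __atomic_flag_wait_explicit(__g, __m); // spin until the lock is held
  //     int __r = *__p;                        // plain load, now race-free
  //     atomic_flag_clear_explicit(__g, __m);  // release the lock
  //     return __r;
  //   }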


  struct atomic_flag : private __atomic_flag_base
  {
    atomic_flag() = default;
    ~atomic_flag() = default;
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;

    atomic_flag(bool __i) { _M_i = __i; }

    bool
    test_and_set(memory_order __m = memory_order_seq_cst) volatile;

    void
    clear(memory_order __m = memory_order_seq_cst) volatile;
  };
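
  // Usage sketch (illustrative only): atomic_flag as a spin lock, the same
  // pattern the lock-based macros above rely on.
  //
  //   __atomic0::atomic_flag __f(false);
  //   while (__f.test_and_set(memory_order_acquire))
  //     { }                              // spin until the flag was clear
  //   // ... critical section ...
  //   __f.clear(memory_order_release);   // release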


  struct atomic_address
  {
  private:
    void* _M_i;

  public:
    atomic_address() = default;
    ~atomic_address() = default;
    atomic_address(const atomic_address&) = delete;
    atomic_address& operator=(const atomic_address&) = delete;

    atomic_address(void* __v) { _M_i = __v; }

    bool
    is_lock_free() const volatile
    { return false; }

    void
    store(void* __v, memory_order __m = memory_order_seq_cst) volatile
    {
      __glibcxx_assert(__m != memory_order_acquire);
      __glibcxx_assert(__m != memory_order_acq_rel);
      __glibcxx_assert(__m != memory_order_consume);
      _ATOMIC_STORE_(this, __v, __m);
    }

    void*
    load(memory_order __m = memory_order_seq_cst) const volatile
    {
      __glibcxx_assert(__m != memory_order_release);
      __glibcxx_assert(__m != memory_order_acq_rel);
      return _ATOMIC_LOAD_(this, __m);
    }

    void*
    exchange(void* __v, memory_order __m = memory_order_seq_cst) volatile
    { return _ATOMIC_MODIFY_(this, =, __v, __m); }

    bool
    compare_exchange_weak(void*& __v1, void* __v2, memory_order __m1,
                          memory_order __m2) volatile
    {
      __glibcxx_assert(__m2 != memory_order_release);
      __glibcxx_assert(__m2 != memory_order_acq_rel);
      __glibcxx_assert(__m2 <= __m1);
      return _ATOMIC_CMPEXCHNG_(this, &__v1, __v2, __m1);
    }

    bool
    compare_exchange_weak(void*& __v1, void* __v2,
                          memory_order __m = memory_order_seq_cst) volatile
    {
      return compare_exchange_weak(__v1, __v2, __m,
                                   __calculate_memory_order(__m));
    }

    bool
    compare_exchange_strong(void*& __v1, void* __v2, memory_order __m1,
                            memory_order __m2) volatile
    {
      __glibcxx_assert(__m2 != memory_order_release);
      __glibcxx_assert(__m2 != memory_order_acq_rel);
      __glibcxx_assert(__m2 <= __m1);
      return _ATOMIC_CMPEXCHNG_(this, &__v1, __v2, __m1);
    }

    bool
    compare_exchange_strong(void*& __v1, void* __v2,
                            memory_order __m = memory_order_seq_cst) volatile
    {
      return compare_exchange_strong(__v1, __v2, __m,
                                     __calculate_memory_order(__m));
    }

    void*
    fetch_add(ptrdiff_t __d, memory_order __m = memory_order_seq_cst) volatile
    {
      void* volatile* __p = &(_M_i);
      volatile __atomic_flag_base* __g = __atomic_flag_for_address(__p);
      __atomic_flag_wait_explicit(__g, __m);
      void* __r = *__p;
      *__p = (void*)((char*)(*__p) + __d);
      atomic_flag_clear_explicit(__g, __m);
      return __r;
    }

    void*
    fetch_sub(ptrdiff_t __d, memory_order __m = memory_order_seq_cst) volatile
    {
      void* volatile* __p = &(_M_i);
      volatile __atomic_flag_base* __g = __atomic_flag_for_address(__p);
      __atomic_flag_wait_explicit(__g, __m);
      void* __r = *__p;
      *__p = (void*)((char*)(*__p) - __d);
      atomic_flag_clear_explicit(__g, __m);
      return __r;
    }

    operator void*() const volatile
    { return load(); }

    void*
    operator=(void* __v)
    {
      store(__v);
      return __v;
    }

    void*
    operator+=(ptrdiff_t __d) volatile
    { return fetch_add(__d) + __d; }

    void*
    operator-=(ptrdiff_t __d) volatile
    { return fetch_sub(__d) - __d; }
  };
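
  // Usage sketch (illustrative only): fetch_add/fetch_sub step the stored
  // pointer in units of bytes, matching char* arithmetic, and return the
  // value held before the update.
  //
  //   char __buf[64];
  //   __atomic0::atomic_address __a(__buf);
  //   void* __prev = __a.fetch_add(8);  // returns __buf, stores __buf + 8
  //   __a += 8;                         // stored pointer is now __buf + 16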
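
  // Base class template for the atomic integral types.  Every member, the
  // overloaded operators included, funnels through the lock-based macros
  // above, which is why is_lock_free() reports false.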
  template<typename _ITp>
    struct __atomic_base
    {
    private:
      typedef _ITp __integral_type;

      __integral_type _M_i;

    public:
      __atomic_base() = default;
      ~__atomic_base() = default;
      __atomic_base(const __atomic_base&) = delete;
      __atomic_base& operator=(const __atomic_base&) = delete;

      __atomic_base(__integral_type __i) { _M_i = __i; }

      operator __integral_type() const volatile
      { return load(); }

      __integral_type
      operator=(__integral_type __i)
      {
        store(__i);
        return __i;
      }

      __integral_type
      operator++(int) volatile
      { return fetch_add(1); }

      __integral_type
      operator--(int) volatile
      { return fetch_sub(1); }

      __integral_type
      operator++() volatile
      { return fetch_add(1) + 1; }

      __integral_type
      operator--() volatile
      { return fetch_sub(1) - 1; }

      __integral_type
      operator+=(__integral_type __i) volatile
      { return fetch_add(__i) + __i; }

      __integral_type
      operator-=(__integral_type __i) volatile
      { return fetch_sub(__i) - __i; }

      __integral_type
      operator&=(__integral_type __i) volatile
      { return fetch_and(__i) & __i; }

      __integral_type
      operator|=(__integral_type __i) volatile
      { return fetch_or(__i) | __i; }

      __integral_type
      operator^=(__integral_type __i) volatile
      { return fetch_xor(__i) ^ __i; }

      bool
      is_lock_free() const volatile
      { return false; }

      void
      store(__integral_type __i,
            memory_order __m = memory_order_seq_cst) volatile
      {
        __glibcxx_assert(__m != memory_order_acquire);
        __glibcxx_assert(__m != memory_order_acq_rel);
        __glibcxx_assert(__m != memory_order_consume);
        _ATOMIC_STORE_(this, __i, __m);
      }

      __integral_type
      load(memory_order __m = memory_order_seq_cst) const volatile
      {
        __glibcxx_assert(__m != memory_order_release);
        __glibcxx_assert(__m != memory_order_acq_rel);
        return _ATOMIC_LOAD_(this, __m);
      }

      __integral_type
      exchange(__integral_type __i,
               memory_order __m = memory_order_seq_cst) volatile
      { return _ATOMIC_MODIFY_(this, =, __i, __m); }

      bool
      compare_exchange_weak(__integral_type& __i1, __integral_type __i2,
                            memory_order __m1, memory_order __m2) volatile
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);
        return _ATOMIC_CMPEXCHNG_(this, &__i1, __i2, __m1);
      }

      bool
      compare_exchange_weak(__integral_type& __i1, __integral_type __i2,
                            memory_order __m = memory_order_seq_cst) volatile
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __calculate_memory_order(__m));
      }

      bool
      compare_exchange_strong(__integral_type& __i1, __integral_type __i2,
                              memory_order __m1, memory_order __m2) volatile
      {
        __glibcxx_assert(__m2 != memory_order_release);
        __glibcxx_assert(__m2 != memory_order_acq_rel);
        __glibcxx_assert(__m2 <= __m1);
        return _ATOMIC_CMPEXCHNG_(this, &__i1, __i2, __m1);
      }

      bool
      compare_exchange_strong(__integral_type& __i1, __integral_type __i2,
                              memory_order __m = memory_order_seq_cst) volatile
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __calculate_memory_order(__m));
      }

      __integral_type
      fetch_add(__integral_type __i,
                memory_order __m = memory_order_seq_cst) volatile
      { return _ATOMIC_MODIFY_(this, +=, __i, __m); }

      __integral_type
      fetch_sub(__integral_type __i,
                memory_order __m = memory_order_seq_cst) volatile
      { return _ATOMIC_MODIFY_(this, -=, __i, __m); }

      __integral_type
      fetch_and(__integral_type __i,
                memory_order __m = memory_order_seq_cst) volatile
      { return _ATOMIC_MODIFY_(this, &=, __i, __m); }

      __integral_type
      fetch_or(__integral_type __i,
               memory_order __m = memory_order_seq_cst) volatile
      { return _ATOMIC_MODIFY_(this, |=, __i, __m); }

      __integral_type
      fetch_xor(__integral_type __i,
                memory_order __m = memory_order_seq_cst) volatile
      { return _ATOMIC_MODIFY_(this, ^=, __i, __m); }
    };
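
  // Usage sketch (illustrative only): the fetch_* members return the value
  // held before the update, while the compound operators return the value
  // after it.
  //
  //   __atomic0::__atomic_base<int> __i(0);
  //   int __old = __i.fetch_add(5);    // __old == 0, stored value now 5
  //   __i |= 0x10;                     // fetch_or under the same lock
  //   int __seen = __i.load(memory_order_acquire);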


  struct atomic_bool
  {
  private:
    __atomic_base<bool> _M_base;

  public:
    atomic_bool() = default;
    ~atomic_bool() = default;
    atomic_bool(const atomic_bool&) = delete;
    atomic_bool& operator=(const atomic_bool&) = delete;

    atomic_bool(bool __i) : _M_base(__i) { }

    bool
    operator=(bool __i)
    { return _M_base.operator=(__i); }

    operator bool() const volatile
    { return _M_base.load(); }

    bool
    is_lock_free() const volatile
    { return _M_base.is_lock_free(); }

    void
    store(bool __i, memory_order __m = memory_order_seq_cst) volatile
    { _M_base.store(__i, __m); }

    bool
    load(memory_order __m = memory_order_seq_cst) const volatile
    { return _M_base.load(__m); }

    bool
    exchange(bool __i, memory_order __m = memory_order_seq_cst) volatile
    { return _M_base.exchange(__i, __m); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
                          memory_order __m2) volatile
    { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_weak(bool& __i1, bool __i2,
                          memory_order __m = memory_order_seq_cst) volatile
    { return _M_base.compare_exchange_weak(__i1, __i2, __m); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
                            memory_order __m2) volatile
    { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }

    bool
    compare_exchange_strong(bool& __i1, bool __i2,
                            memory_order __m = memory_order_seq_cst) volatile
    { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
  };
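
  // Usage sketch (illustrative only): on failure, compare_exchange copies
  // the observed value back into the expected argument, so a retry loop
  // must reset it before trying again.
  //
  //   __atomic0::atomic_bool __b(false);
  //   bool __expected = false;
  //   while (!__b.compare_exchange_weak(__expected, true))
  //     __expected = false;           // observed value was written back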

#undef _ATOMIC_LOAD_
#undef _ATOMIC_STORE_
#undef _ATOMIC_MODIFY_
#undef _ATOMIC_CMPEXCHNG_
} // namespace __atomic0

#endif