#ifndef _GLIBCXX_ATOMIC_BASE_H
#define _GLIBCXX_ATOMIC_BASE_H 1

#pragma GCC system_header

#ifndef _GLIBCXX_ALWAYS_INLINE
#define _GLIBCXX_ALWAYS_INLINE inline __attribute__((__always_inline__))
#endif

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

  /// Enumeration for memory_order
#if __cplusplus > 201703L
  enum class memory_order : int
    {
      relaxed,
      consume,
      acquire,
      release,
      acq_rel,
      seq_cst
    };

  inline constexpr memory_order memory_order_relaxed = memory_order::relaxed;
  inline constexpr memory_order memory_order_consume = memory_order::consume;
  inline constexpr memory_order memory_order_acquire = memory_order::acquire;
  inline constexpr memory_order memory_order_release = memory_order::release;
  inline constexpr memory_order memory_order_acq_rel = memory_order::acq_rel;
  inline constexpr memory_order memory_order_seq_cst = memory_order::seq_cst;
#endif
  enum __memory_order_modifier
    {
      __memory_order_mask          = 0x0ffff,
      __memory_order_modifier_mask = 0xffff0000,
      __memory_order_hle_acquire   = 0x10000,
      __memory_order_hle_release   = 0x20000
    };
  constexpr memory_order
  operator|(memory_order __m, __memory_order_modifier __mod)
  { return memory_order(int(__m) | int(__mod)); }

  constexpr memory_order
  operator&(memory_order __m, __memory_order_modifier __mod)
  { return memory_order(int(__m) & int(__mod)); }
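  // Illustrative note (not part of this header): operator| lets one of the
  // __memory_order_hle_* modifiers be combined with a standard order, e.g.
  // on a TSX-capable x86 target something like
  //
  //   flag.test_and_set(std::memory_order_acquire | std::__memory_order_hle_acquire);
  //
  // The mask constants above then split such a value back into its order and
  // modifier parts.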
  // Drop release ordering as per [atomics.types.operations.req]/21
  constexpr memory_order
  __cmpexch_failure_order2(memory_order __m) noexcept
  {
    return __m == memory_order_acq_rel ? memory_order_acquire
      : __m == memory_order_release ? memory_order_relaxed : __m;
  }

  constexpr memory_order
  __cmpexch_failure_order(memory_order __m) noexcept
  {
    return memory_order(__cmpexch_failure_order2(__m & __memory_order_mask)
      | __memory_order_modifier(__m & __memory_order_modifier_mask));
  }
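  // Worked mapping (illustrative comment, not part of this header): the
  // single-order compare_exchange overloads below derive their failure order
  // from the success order via __cmpexch_failure_order, e.g.
  //
  //   seq_cst -> seq_cst,  acq_rel -> acquire,  release -> relaxed,
  //
  // since a failed compare-exchange performs no store and therefore cannot
  // carry release semantics.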
  _GLIBCXX_ALWAYS_INLINE void
  atomic_thread_fence(memory_order __m) noexcept
  { __atomic_thread_fence(int(__m)); }
  _GLIBCXX_ALWAYS_INLINE void
  atomic_signal_fence(memory_order __m) noexcept
  { __atomic_signal_fence(int(__m)); }
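  // Usage sketch (illustrative comment, not part of this header): pairing a
  // release fence with an acquire fence to publish data through an otherwise
  // relaxed flag.
  //
  //   int payload;
  //   std::atomic<bool> ready{false};
  //
  //   void producer()
  //   {
  //     payload = 42;
  //     std::atomic_thread_fence(std::memory_order_release);
  //     ready.store(true, std::memory_order_relaxed);
  //   }
  //
  //   void consumer()
  //   {
  //     while (!ready.load(std::memory_order_relaxed)) { }
  //     std::atomic_thread_fence(std::memory_order_acquire);
  //     // payload is now guaranteed to read 42
  //   }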
  /// kill_dependency
  template<typename _Tp>
    inline _Tp
    kill_dependency(_Tp __y) noexcept
    {
      _Tp __ret(__y);
      return __ret;
    }

  // Base types for atomics.
  template<typename _IntTp>
    struct __atomic_base;

#define ATOMIC_VAR_INIT(_VI) { _VI }

  /// Generic atomic type, primary class template.
  template<typename _Tp>
    struct atomic;

  /// Partial specialization for pointers.
  template<typename _Tp>
    struct atomic<_Tp*>;
  /* The target's "set" value for test-and-set may not be exactly 1.  */
#if __GCC_ATOMIC_TEST_AND_SET_TRUEVAL == 1
  typedef bool __atomic_flag_data_type;
#else
  typedef unsigned char __atomic_flag_data_type;
#endif
  /// Base type for atomic_flag.
  _GLIBCXX_BEGIN_EXTERN_C

  struct __atomic_flag_base
  {
    __atomic_flag_data_type _M_i;
  };

  _GLIBCXX_END_EXTERN_C

#define ATOMIC_FLAG_INIT { 0 }

  /// atomic_flag
  struct atomic_flag : public __atomic_flag_base
  {
    _GLIBCXX_ALWAYS_INLINE bool
    test_and_set(memory_order __m = memory_order_seq_cst) noexcept
    { return __atomic_test_and_set (&_M_i, int(__m)); }
    _GLIBCXX_ALWAYS_INLINE bool
    test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
    { return __atomic_test_and_set (&_M_i, int(__m)); }
    _GLIBCXX_ALWAYS_INLINE void
    clear(memory_order __m = memory_order_seq_cst) noexcept
    {
      memory_order __b = __m & __memory_order_mask;
      __glibcxx_assert(__b != memory_order_consume);
      __glibcxx_assert(__b != memory_order_acquire);
      __glibcxx_assert(__b != memory_order_acq_rel);

      __atomic_clear (&_M_i, int(__m));
    }
    _GLIBCXX_ALWAYS_INLINE void
    clear(memory_order __m = memory_order_seq_cst) volatile noexcept
    {
      memory_order __b = __m & __memory_order_mask;
      __glibcxx_assert(__b != memory_order_consume);
      __glibcxx_assert(__b != memory_order_acquire);
      __glibcxx_assert(__b != memory_order_acq_rel);

      __atomic_clear (&_M_i, int(__m));
    }
    static constexpr __atomic_flag_data_type
    _S_init(bool __i)
    { return __i ? __GCC_ATOMIC_TEST_AND_SET_TRUEVAL : 0; }
  };
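  // Usage sketch (illustrative comment, not part of this header): atomic_flag
  // as a minimal spinlock built from test_and_set/clear.
  //
  //   std::atomic_flag lock_flag = ATOMIC_FLAG_INIT;
  //
  //   void with_lock()
  //   {
  //     while (lock_flag.test_and_set(std::memory_order_acquire))
  //       { } // spin: previous value was true, so another thread holds the lock
  //     // ... critical section ...
  //     lock_flag.clear(std::memory_order_release);
  //   }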
  /// Base class for atomic integrals.
  template<typename _ITp>
    struct __atomic_base
    {
      using value_type = _ITp;
      using difference_type = value_type;

    private:
      typedef _ITp __int_type;

      static constexpr int _S_alignment =
        sizeof(_ITp) > alignof(_ITp) ? sizeof(_ITp) : alignof(_ITp);

      alignas(_S_alignment) __int_type _M_i;
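      // Worked instance (illustrative comment, not part of this header): on a
      // target where long long has sizeof == 8 but alignof == 4 (e.g. 32-bit
      // x86), _S_alignment becomes 8, over-aligning _M_i so that the 8-byte
      // atomic instructions can operate on it directly.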
    public:
      constexpr __atomic_base(__int_type __i) noexcept : _M_i (__i) { }
      operator __int_type() const noexcept
      { return load(); }

      operator __int_type() const volatile noexcept
      { return load(); }

      __int_type
      operator=(__int_type __i) noexcept
      {
        store(__i);
        return __i;
      }

      __int_type
      operator=(__int_type __i) volatile noexcept
      {
        store(__i);
        return __i;
      }
      __int_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __int_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __int_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __int_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }
      __int_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_i, 1, int(memory_order_seq_cst)); }

      __int_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_i, 1, int(memory_order_seq_cst)); }

      __int_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_i, 1, int(memory_order_seq_cst)); }

      __int_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_i, 1, int(memory_order_seq_cst)); }
      __int_type
      operator+=(__int_type __i) noexcept
      { return __atomic_add_fetch(&_M_i, __i, int(memory_order_seq_cst)); }

      __int_type
      operator+=(__int_type __i) volatile noexcept
      { return __atomic_add_fetch(&_M_i, __i, int(memory_order_seq_cst)); }

      __int_type
      operator-=(__int_type __i) noexcept
      { return __atomic_sub_fetch(&_M_i, __i, int(memory_order_seq_cst)); }

      __int_type
      operator-=(__int_type __i) volatile noexcept
      { return __atomic_sub_fetch(&_M_i, __i, int(memory_order_seq_cst)); }

      __int_type
      operator&=(__int_type __i) noexcept
      { return __atomic_and_fetch(&_M_i, __i, int(memory_order_seq_cst)); }

      __int_type
      operator&=(__int_type __i) volatile noexcept
      { return __atomic_and_fetch(&_M_i, __i, int(memory_order_seq_cst)); }

      __int_type
      operator|=(__int_type __i) noexcept
      { return __atomic_or_fetch(&_M_i, __i, int(memory_order_seq_cst)); }

      __int_type
      operator|=(__int_type __i) volatile noexcept
      { return __atomic_or_fetch(&_M_i, __i, int(memory_order_seq_cst)); }

      __int_type
      operator^=(__int_type __i) noexcept
      { return __atomic_xor_fetch(&_M_i, __i, int(memory_order_seq_cst)); }

      __int_type
      operator^=(__int_type __i) volatile noexcept
      { return __atomic_xor_fetch(&_M_i, __i, int(memory_order_seq_cst)); }
      bool
      is_lock_free() const noexcept
      {
        return __atomic_is_lock_free(sizeof(_M_i),
            reinterpret_cast<void *>(-_S_alignment));
      }

      bool
      is_lock_free() const volatile noexcept
      {
        return __atomic_is_lock_free(sizeof(_M_i),
            reinterpret_cast<void *>(-_S_alignment));
      }
      _GLIBCXX_ALWAYS_INLINE void
      store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_i, __i, int(__m));
      }

      _GLIBCXX_ALWAYS_INLINE void
      store(__int_type __i,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_i, __i, int(__m));
      }
      _GLIBCXX_ALWAYS_INLINE __int_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, int(__m));
      }

      _GLIBCXX_ALWAYS_INLINE __int_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_i, int(__m));
      }
      _GLIBCXX_ALWAYS_INLINE __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_exchange_n(&_M_i, __i, int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      exchange(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_exchange_n(&_M_i, __i, int(__m)); }
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1, memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1,
                                           int(__m1), int(__m2));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m1,
                            memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1,
                                           int(__m1), int(__m2));
      }
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_weak(__int_type& __i1, __int_type __i2,
                            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_weak(__i1, __i2, __m,
                                     __cmpexch_failure_order(__m));
      }
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1, memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0,
                                           int(__m1), int(__m2));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;

        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0,
                                           int(__m1), int(__m2));
      }
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m = memory_order_seq_cst) noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__int_type& __i1, __int_type __i2,
                              memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        return compare_exchange_strong(__i1, __i2, __m,
                                       __cmpexch_failure_order(__m));
      }
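      // Usage sketch (illustrative comment, not part of this header): the
      // classic compare_exchange_weak retry loop, here computing an atomic
      // maximum on a std::atomic<int> built on top of this base class.
      //
      //   void update_max(std::atomic<int>& m, int value)
      //   {
      //     int cur = m.load(std::memory_order_relaxed);
      //     while (cur < value
      //            && !m.compare_exchange_weak(cur, value,
      //                                        std::memory_order_release,
      //                                        std::memory_order_relaxed))
      //       { } // on failure, cur is reloaded with the current value
      //   }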
      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_i, __i, int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_add(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_i, __i, int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_i, __i, int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_sub(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_i, __i, int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_and(&_M_i, __i, int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_and(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_and(&_M_i, __i, int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_or(&_M_i, __i, int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_or(__int_type __i,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_or(&_M_i, __i, int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_xor(&_M_i, __i, int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __int_type
      fetch_xor(__int_type __i,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_xor(&_M_i, __i, int(__m)); }
    };
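  // Usage sketch (illustrative comment, not part of this header): the fetch_*
  // members back std::atomic<integral>, e.g. a relaxed event counter.
  //
  //   std::atomic<unsigned long> hits{0};
  //
  //   void record_hit()
  //   { hits.fetch_add(1, std::memory_order_relaxed); }  // returns the old value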
  /// Partial specialization for pointer types.
  template<typename _PTp>
    struct __atomic_base<_PTp*>
    {
    private:
      typedef _PTp* __pointer_type;

      __pointer_type _M_p;

      // Factored out to facilitate explicit specialization.
      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) const { return __d * sizeof(_PTp); }

      constexpr ptrdiff_t
      _M_type_size(ptrdiff_t __d) const volatile { return __d * sizeof(_PTp); }

    public:
      constexpr __atomic_base(__pointer_type __p) noexcept : _M_p (__p) { }
      operator __pointer_type() const noexcept
      { return load(); }

      operator __pointer_type() const volatile noexcept
      { return load(); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      {
        store(__p);
        return __p;
      }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      {
        store(__p);
        return __p;
      }
      __pointer_type
      operator++(int) noexcept
      { return fetch_add(1); }

      __pointer_type
      operator++(int) volatile noexcept
      { return fetch_add(1); }

      __pointer_type
      operator--(int) noexcept
      { return fetch_sub(1); }

      __pointer_type
      operator--(int) volatile noexcept
      { return fetch_sub(1); }
      __pointer_type
      operator++() noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  int(memory_order_seq_cst)); }

      __pointer_type
      operator++() volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(1),
                                  int(memory_order_seq_cst)); }

      __pointer_type
      operator--() noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  int(memory_order_seq_cst)); }

      __pointer_type
      operator--() volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(1),
                                  int(memory_order_seq_cst)); }
      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  int(memory_order_seq_cst)); }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      { return __atomic_add_fetch(&_M_p, _M_type_size(__d),
                                  int(memory_order_seq_cst)); }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  int(memory_order_seq_cst)); }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      { return __atomic_sub_fetch(&_M_p, _M_type_size(__d),
                                  int(memory_order_seq_cst)); }
      bool
      is_lock_free() const noexcept
      {
        return __atomic_is_lock_free(sizeof(_M_p),
            reinterpret_cast<void *>(-__alignof(_M_p)));
      }

      bool
      is_lock_free() const volatile noexcept
      {
        return __atomic_is_lock_free(sizeof(_M_p),
            reinterpret_cast<void *>(-__alignof(_M_p)));
      }
      _GLIBCXX_ALWAYS_INLINE void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) noexcept
      {
        memory_order __b = __m & __memory_order_mask;

        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_p, __p, int(__m));
      }

      _GLIBCXX_ALWAYS_INLINE void
      store(__pointer_type __p,
            memory_order __m = memory_order_seq_cst) volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_acquire);
        __glibcxx_assert(__b != memory_order_acq_rel);
        __glibcxx_assert(__b != memory_order_consume);

        __atomic_store_n(&_M_p, __p, int(__m));
      }
      _GLIBCXX_ALWAYS_INLINE __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, int(__m));
      }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
        memory_order __b = __m & __memory_order_mask;
        __glibcxx_assert(__b != memory_order_release);
        __glibcxx_assert(__b != memory_order_acq_rel);

        return __atomic_load_n(&_M_p, int(__m));
      }
      _GLIBCXX_ALWAYS_INLINE __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_exchange_n(&_M_p, __p, int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      exchange(__pointer_type __p,
               memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_exchange_n(&_M_p, __p, int(__m)); }
      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;
        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0,
                                           int(__m1), int(__m2));
      }

      _GLIBCXX_ALWAYS_INLINE bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
                              memory_order __m1,
                              memory_order __m2) volatile noexcept
      {
        memory_order __b2 = __m2 & __memory_order_mask;
        memory_order __b1 = __m1 & __memory_order_mask;

        __glibcxx_assert(__b2 != memory_order_release);
        __glibcxx_assert(__b2 != memory_order_acq_rel);
        __glibcxx_assert(__b2 <= __b1);

        return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0,
                                           int(__m1), int(__m2));
      }
      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_add(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_add(&_M_p, _M_type_size(__d), int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), int(__m)); }

      _GLIBCXX_ALWAYS_INLINE __pointer_type
      fetch_sub(ptrdiff_t __d,
                memory_order __m = memory_order_seq_cst) volatile noexcept
      { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), int(__m)); }
    };
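  // Usage sketch (illustrative comment, not part of this header): fetch_add on
  // std::atomic<T*> advances by whole elements, i.e. _M_type_size scales the
  // ptrdiff_t operand by sizeof(T).
  //
  //   int buffer[64];
  //   std::atomic<int*> cursor{buffer};
  //
  //   int* claim_slot()
  //   { return cursor.fetch_add(1, std::memory_order_relaxed); } // steps by sizeof(int)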
_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std

#endif // _GLIBCXX_ATOMIC_BASE_H