Lines matching refs: __m — every matched line in the libc++ <atomic> implementation that references the memory_order parameter __m, with the original source line numbers preserved.
830 void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
831 {__c11_atomic_store(&__a_, __d, __m);}
833 void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
834 {__c11_atomic_store(&__a_, __d, __m);}
836 _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
837 {return __c11_atomic_load(&__a_, __m);}
839 _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
840 {return __c11_atomic_load(&__a_, __m);}
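
The matches at 830-840 are the store/load members (volatile and non-volatile overloads) forwarding the memory_order argument __m directly to the __c11_atomic_store/__c11_atomic_load builtins. A minimal usage sketch through the public std::atomic interface, with illustrative variable names; it exercises both an explicit __m and the seq_cst default:

    #include <atomic>
    #include <cassert>

    int main() {
        std::atomic<int> counter{0};

        // store() passes memory_order_release through as __m.
        counter.store(42, std::memory_order_release);

        // load() passes memory_order_acquire through as __m.
        int v = counter.load(std::memory_order_acquire);
        assert(v == 42);

        // Omitting the argument uses the default __m = memory_order_seq_cst.
        counter.store(7);
        return counter.load() == 7 ? 0 : 1;
    }
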
846 _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
847 {return __c11_atomic_exchange(&__a_, __d, __m);}
849 _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
850 {return __c11_atomic_exchange(&__a_, __d, __m);}
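
Lines 846-850 are the exchange members: they install __d with ordering __m and return the value that was replaced. A short sketch (names illustrative):

    #include <atomic>
    #include <cassert>

    int main() {
        std::atomic<int> slot{1};

        // exchange() installs 2 and returns the value it replaced,
        // using the requested ordering as __m.
        int old = slot.exchange(2, std::memory_order_acq_rel);
        assert(old == 1 && slot.load() == 2);
        return 0;
    }
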
869 memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
870 {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
873 memory_order __m = memory_order_seq_cst) _NOEXCEPT
874 {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
877 memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
878 {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
881 memory_order __m = memory_order_seq_cst) _NOEXCEPT
882 {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
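
Lines 869-882 are the single-order compare_exchange_weak/strong overloads; note that they pass __m twice, once as the success order and once as the failure order. A minimal compare-and-swap loop using that single-order form (the function name fetch_increment is illustrative):

    #include <atomic>

    // Atomically increment `value` and return the value observed just
    // before the successful swap.
    int fetch_increment(std::atomic<int>& value) {
        int expected = value.load(std::memory_order_relaxed);
        // The one-order overload uses __m for both success and failure.
        while (!value.compare_exchange_weak(expected, expected + 1,
                                            std::memory_order_seq_cst)) {
            // On failure, `expected` has been reloaded with the current value.
        }
        return expected;
    }

    int main() {
        std::atomic<int> v{0};
        fetch_increment(v);
        return v.load() == 1 ? 0 : 1;
    }
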
918 _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
919 {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
921 _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
922 {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
924 _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
925 {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
927 _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
928 {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
930 _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
931 {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
933 _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
934 {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
936 _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
937 {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
939 _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
940 {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
942 _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
943 {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
945 _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
946 {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
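
Lines 918-946 are the integer read-modify-write members; each returns the pre-operation value and hands __m to the matching __c11_atomic_fetch_* builtin. A sketch touching each of them (values chosen only to make the arithmetic easy to follow):

    #include <atomic>
    #include <cassert>

    int main() {
        std::atomic<unsigned> flags{0x0F};

        // fetch_add returns the value held *before* the addition.
        unsigned before = flags.fetch_add(1, std::memory_order_relaxed);
        assert(before == 0x0F && flags.load() == 0x10);

        flags.fetch_sub(1, std::memory_order_relaxed);   // back to 0x0F
        flags.fetch_and(0x3, std::memory_order_relaxed); // 0x03
        flags.fetch_or(0x8, std::memory_order_relaxed);  // 0x0B
        flags.fetch_xor(0x1, std::memory_order_relaxed); // 0x0A
        return flags.load() == 0x0A ? 0 : 1;
    }
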
1026 _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
1028 {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
1030 _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1031 {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
1033 _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
1035 {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
1037 _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1038 {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
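
Lines 1026-1038 are the atomic<_Tp*> specialization: fetch_add/fetch_sub take a ptrdiff_t and step the stored pointer in element units. A sketch (array and variable names illustrative):

    #include <atomic>
    #include <cassert>

    int main() {
        int data[4] = {10, 20, 30, 40};
        std::atomic<int*> cursor{data};

        // fetch_add on atomic<int*> advances by whole elements and
        // returns the pointer held before the addition.
        int* prev = cursor.fetch_add(2, std::memory_order_relaxed);
        assert(prev == data);
        assert(*cursor.load() == 30);

        cursor.fetch_sub(1, std::memory_order_relaxed);
        assert(*cursor.load() == 20);
        return 0;
    }
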
1125 atomic_store_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1127 __o->store(__d, __m);
1133 atomic_store_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1135 __o->store(__d, __m);
1161 atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1163 return __o->load(__m);
1169 atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1171 return __o->load(__m);
1197 atomic_exchange_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1199 return __o->exchange(__d, __m);
1205 atomic_exchange_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1207 return __o->exchange(__d, __m);
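
Lines 1125-1207 are the C-style *_explicit free functions for store, load, and exchange; each simply forwards __m to the corresponding member on the object it is handed, in both volatile and non-volatile flavors. A sketch of the free-function spellings:

    #include <atomic>
    #include <cassert>

    int main() {
        std::atomic<int> a{0};

        // Equivalent to a.store(5, memory_order_release).
        std::atomic_store_explicit(&a, 5, std::memory_order_release);

        // Equivalent to a.load(memory_order_acquire).
        int v = std::atomic_load_explicit(&a, std::memory_order_acquire);
        assert(v == 5);

        // Equivalent to a.exchange(9, memory_order_acq_rel).
        int old = std::atomic_exchange_explicit(&a, 9, std::memory_order_acq_rel);
        assert(old == 5 && a.load() == 9);
        return 0;
    }
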
1340 atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1342 return __o->fetch_add(__op, __m);
1352 atomic_fetch_add_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1354 return __o->fetch_add(__op, __m);
1361 memory_order __m) _NOEXCEPT
1363 return __o->fetch_add(__op, __m);
1369 atomic_fetch_add_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
1371 return __o->fetch_add(__op, __m);
1425 atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1427 return __o->fetch_sub(__op, __m);
1437 atomic_fetch_sub_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1439 return __o->fetch_sub(__op, __m);
1446 memory_order __m) _NOEXCEPT
1448 return __o->fetch_sub(__op, __m);
1454 atomic_fetch_sub_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
1456 return __o->fetch_sub(__op, __m);
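
Lines 1340-1456 are atomic_fetch_add_explicit/atomic_fetch_sub_explicit, including the atomic<_Tp*> overloads that take a ptrdiff_t. A sketch of both the integer and pointer forms:

    #include <atomic>
    #include <cassert>

    int main() {
        std::atomic<long> n{100};
        long before = std::atomic_fetch_add_explicit(&n, 5L, std::memory_order_relaxed);
        assert(before == 100 && n.load() == 105);

        std::atomic_fetch_sub_explicit(&n, 5L, std::memory_order_relaxed);
        assert(n.load() == 100);

        // The pointer overloads take a ptrdiff_t and step in element units.
        int buf[3] = {1, 2, 3};
        std::atomic<int*> p{buf};
        std::atomic_fetch_add_explicit(&p, 2, std::memory_order_relaxed);
        assert(p.load() == buf + 2);
        return 0;
    }
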
1494 atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1496 return __o->fetch_and(__op, __m);
1506 atomic_fetch_and_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1508 return __o->fetch_and(__op, __m);
1546 atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1548 return __o->fetch_or(__op, __m);
1558 atomic_fetch_or_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1560 return __o->fetch_or(__op, __m);
1598 atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1600 return __o->fetch_xor(__op, __m);
1610 atomic_fetch_xor_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1612 return __o->fetch_xor(__op, __m);
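
Lines 1494-1612 are the bitwise atomic_fetch_and/or/xor_explicit free functions; like the members they wrap, they return the pre-operation value. A sketch (the bit patterns are arbitrary):

    #include <atomic>
    #include <cassert>

    int main() {
        std::atomic<unsigned> bits{0xCu};

        std::atomic_fetch_and_explicit(&bits, 0x4u, std::memory_order_relaxed); // 0x4
        std::atomic_fetch_or_explicit(&bits, 0x2u, std::memory_order_relaxed);  // 0x6
        unsigned before =
            std::atomic_fetch_xor_explicit(&bits, 0x6u, std::memory_order_relaxed);
        assert(before == 0x6u && bits.load() == 0u);
        return 0;
    }
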
1622 bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1623 {return __c11_atomic_exchange(&__a_, true, __m);}
1625 bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
1626 {return __c11_atomic_exchange(&__a_, true, __m);}
1628 void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1629 {__c11_atomic_store(&__a_, false, __m);}
1631 void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
1632 {__c11_atomic_store(&__a_, false, __m);}
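
Lines 1622-1632 are atomic_flag::test_and_set and atomic_flag::clear, implemented as an exchange of true and a store of false with ordering __m. The classic use is a spinlock; a minimal sketch, where the spinlock class itself is illustrative and not part of the header:

    #include <atomic>

    class spinlock {
        std::atomic_flag flag_ = ATOMIC_FLAG_INIT;
    public:
        void lock() {
            // test_and_set(acquire) maps to __c11_atomic_exchange(&__a_, true, __m).
            while (flag_.test_and_set(std::memory_order_acquire)) {
                // spin until the current holder clears the flag
            }
        }
        void unlock() {
            // clear(release) maps to __c11_atomic_store(&__a_, false, __m).
            flag_.clear(std::memory_order_release);
        }
    };

    int main() {
        spinlock s;
        s.lock();
        s.unlock();
        return 0;
    }
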
1672 atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
1674 return __o->test_and_set(__m);
1679 atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
1681 return __o->test_and_set(__m);
1700 atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
1702 __o->clear(__m);
1707 atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
1709 __o->clear(__m);
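
Lines 1672-1709 are the free-function forms of the same two atomic_flag operations. A sketch:

    #include <atomic>
    #include <cassert>

    int main() {
        std::atomic_flag f = ATOMIC_FLAG_INIT;

        // Forwards __m to f.test_and_set(__m); returns the prior state.
        bool was_set = std::atomic_flag_test_and_set_explicit(&f, std::memory_order_acquire);
        assert(!was_set);  // the flag started clear

        // Forwards __m to f.clear(__m).
        std::atomic_flag_clear_explicit(&f, std::memory_order_release);
        return 0;
    }
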
1716 atomic_thread_fence(memory_order __m) _NOEXCEPT
1718 __c11_atomic_thread_fence(__m);
1723 atomic_signal_fence(memory_order __m) _NOEXCEPT
1725 __c11_atomic_signal_fence(__m);
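
Lines 1716-1725 are the free-standing fences: atomic_thread_fence orders memory accesses across threads, while atomic_signal_fence only constrains the compiler (for code sharing a thread with a signal handler). A minimal release/acquire fence sketch; the producer/consumer/payload names are illustrative, and in a real program the two functions would run on different threads:

    #include <atomic>

    std::atomic<bool> ready{false};
    int payload = 0;

    void producer() {
        payload = 42;                     // plain, non-atomic store
        std::atomic_thread_fence(std::memory_order_release);
        ready.store(true, std::memory_order_relaxed);
    }

    void consumer() {
        while (!ready.load(std::memory_order_relaxed)) {
            // spin until the producer publishes
        }
        std::atomic_thread_fence(std::memory_order_acquire);
        // The acquire fence makes the write to `payload` visible here.
        int v = payload;
        (void)v;
    }

    int main() {
        producer();
        consumer();
        return 0;
    }
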