1// -*- C++ -*-
2//===--------------------------- atomic -----------------------------------===//
3//
4//                     The LLVM Compiler Infrastructure
5//
6// This file is distributed under the University of Illinois Open Source
7// License. See LICENSE.TXT for details.
8//
9//===----------------------------------------------------------------------===//
10
11#ifndef _LIBCPP_ATOMIC
12#define _LIBCPP_ATOMIC
13
14/*
15    atomic synopsis
16
17namespace std
18{
19
20// feature test macro
21
22#define __cpp_lib_atomic_is_always_lock_free // as specified by SG10
23
24// order and consistency
25
26typedef enum memory_order
27{
28    memory_order_relaxed,
29    memory_order_consume,  // load-consume
30    memory_order_acquire,  // load-acquire
31    memory_order_release,  // store-release
32    memory_order_acq_rel,  // store-release load-acquire
33    memory_order_seq_cst   // store-release load-acquire
34} memory_order;
35
36template <class T> T kill_dependency(T y) noexcept;
37
38// lock-free property
39
40#define ATOMIC_BOOL_LOCK_FREE unspecified
41#define ATOMIC_CHAR_LOCK_FREE unspecified
42#define ATOMIC_CHAR16_T_LOCK_FREE unspecified
43#define ATOMIC_CHAR32_T_LOCK_FREE unspecified
44#define ATOMIC_WCHAR_T_LOCK_FREE unspecified
45#define ATOMIC_SHORT_LOCK_FREE unspecified
46#define ATOMIC_INT_LOCK_FREE unspecified
47#define ATOMIC_LONG_LOCK_FREE unspecified
48#define ATOMIC_LLONG_LOCK_FREE unspecified
49#define ATOMIC_POINTER_LOCK_FREE unspecified
50
51// flag type and operations
52
53typedef struct atomic_flag
54{
55    bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
56    bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
57    void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
58    void clear(memory_order m = memory_order_seq_cst) noexcept;
59    atomic_flag()  noexcept = default;
60    atomic_flag(const atomic_flag&) = delete;
61    atomic_flag& operator=(const atomic_flag&) = delete;
62    atomic_flag& operator=(const atomic_flag&) volatile = delete;
63} atomic_flag;
64
65bool
66    atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept;
67
68bool
69    atomic_flag_test_and_set(atomic_flag* obj) noexcept;
70
71bool
72    atomic_flag_test_and_set_explicit(volatile atomic_flag* obj,
73                                      memory_order m) noexcept;
74
75bool
76    atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;
77
78void
79    atomic_flag_clear(volatile atomic_flag* obj) noexcept;
80
81void
82    atomic_flag_clear(atomic_flag* obj) noexcept;
83
84void
85    atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;
86
87void
88    atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;
89
90#define ATOMIC_FLAG_INIT see below
91#define ATOMIC_VAR_INIT(value) see below
92
93template <class T>
94struct atomic
95{
96    static constexpr bool is_always_lock_free;
97    bool is_lock_free() const volatile noexcept;
98    bool is_lock_free() const noexcept;
99    void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
100    void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
101    T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
102    T load(memory_order m = memory_order_seq_cst) const noexcept;
103    operator T() const volatile noexcept;
104    operator T() const noexcept;
105    T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
106    T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
107    bool compare_exchange_weak(T& expc, T desr,
108                               memory_order s, memory_order f) volatile noexcept;
109    bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
110    bool compare_exchange_strong(T& expc, T desr,
111                                 memory_order s, memory_order f) volatile noexcept;
112    bool compare_exchange_strong(T& expc, T desr,
113                                 memory_order s, memory_order f) noexcept;
114    bool compare_exchange_weak(T& expc, T desr,
115                               memory_order m = memory_order_seq_cst) volatile noexcept;
116    bool compare_exchange_weak(T& expc, T desr,
117                               memory_order m = memory_order_seq_cst) noexcept;
118    bool compare_exchange_strong(T& expc, T desr,
119                                memory_order m = memory_order_seq_cst) volatile noexcept;
120    bool compare_exchange_strong(T& expc, T desr,
121                                 memory_order m = memory_order_seq_cst) noexcept;
122
123    atomic() noexcept = default;
124    constexpr atomic(T desr) noexcept;
125    atomic(const atomic&) = delete;
126    atomic& operator=(const atomic&) = delete;
127    atomic& operator=(const atomic&) volatile = delete;
128    T operator=(T) volatile noexcept;
129    T operator=(T) noexcept;
130};
131
132template <>
133struct atomic<integral>
134{
135    static constexpr bool is_always_lock_free;
136    bool is_lock_free() const volatile noexcept;
137    bool is_lock_free() const noexcept;
138    void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
139    void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
140    integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
141    integral load(memory_order m = memory_order_seq_cst) const noexcept;
142    operator integral() const volatile noexcept;
143    operator integral() const noexcept;
144    integral exchange(integral desr,
145                      memory_order m = memory_order_seq_cst) volatile noexcept;
146    integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
147    bool compare_exchange_weak(integral& expc, integral desr,
148                               memory_order s, memory_order f) volatile noexcept;
149    bool compare_exchange_weak(integral& expc, integral desr,
150                               memory_order s, memory_order f) noexcept;
151    bool compare_exchange_strong(integral& expc, integral desr,
152                                 memory_order s, memory_order f) volatile noexcept;
153    bool compare_exchange_strong(integral& expc, integral desr,
154                                 memory_order s, memory_order f) noexcept;
155    bool compare_exchange_weak(integral& expc, integral desr,
156                               memory_order m = memory_order_seq_cst) volatile noexcept;
157    bool compare_exchange_weak(integral& expc, integral desr,
158                               memory_order m = memory_order_seq_cst) noexcept;
159    bool compare_exchange_strong(integral& expc, integral desr,
160                                memory_order m = memory_order_seq_cst) volatile noexcept;
161    bool compare_exchange_strong(integral& expc, integral desr,
162                                 memory_order m = memory_order_seq_cst) noexcept;
163
164    integral
165        fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
166    integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
167    integral
168        fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
169    integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
170    integral
171        fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
172    integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
173    integral
174        fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
175    integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
176    integral
177        fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
178    integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;
179
180    atomic() noexcept = default;
181    constexpr atomic(integral desr) noexcept;
182    atomic(const atomic&) = delete;
183    atomic& operator=(const atomic&) = delete;
184    atomic& operator=(const atomic&) volatile = delete;
185    integral operator=(integral desr) volatile noexcept;
186    integral operator=(integral desr) noexcept;
187
188    integral operator++(int) volatile noexcept;
189    integral operator++(int) noexcept;
190    integral operator--(int) volatile noexcept;
191    integral operator--(int) noexcept;
192    integral operator++() volatile noexcept;
193    integral operator++() noexcept;
194    integral operator--() volatile noexcept;
195    integral operator--() noexcept;
196    integral operator+=(integral op) volatile noexcept;
197    integral operator+=(integral op) noexcept;
198    integral operator-=(integral op) volatile noexcept;
199    integral operator-=(integral op) noexcept;
200    integral operator&=(integral op) volatile noexcept;
201    integral operator&=(integral op) noexcept;
202    integral operator|=(integral op) volatile noexcept;
203    integral operator|=(integral op) noexcept;
204    integral operator^=(integral op) volatile noexcept;
205    integral operator^=(integral op) noexcept;
206};
207
208template <class T>
209struct atomic<T*>
210{
211    static constexpr bool is_always_lock_free;
212    bool is_lock_free() const volatile noexcept;
213    bool is_lock_free() const noexcept;
214    void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
215    void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
216    T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
217    T* load(memory_order m = memory_order_seq_cst) const noexcept;
218    operator T*() const volatile noexcept;
219    operator T*() const noexcept;
220    T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
221    T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
222    bool compare_exchange_weak(T*& expc, T* desr,
223                               memory_order s, memory_order f) volatile noexcept;
224    bool compare_exchange_weak(T*& expc, T* desr,
225                               memory_order s, memory_order f) noexcept;
226    bool compare_exchange_strong(T*& expc, T* desr,
227                                 memory_order s, memory_order f) volatile noexcept;
228    bool compare_exchange_strong(T*& expc, T* desr,
229                                 memory_order s, memory_order f) noexcept;
230    bool compare_exchange_weak(T*& expc, T* desr,
231                               memory_order m = memory_order_seq_cst) volatile noexcept;
232    bool compare_exchange_weak(T*& expc, T* desr,
233                               memory_order m = memory_order_seq_cst) noexcept;
234    bool compare_exchange_strong(T*& expc, T* desr,
235                                memory_order m = memory_order_seq_cst) volatile noexcept;
236    bool compare_exchange_strong(T*& expc, T* desr,
237                                 memory_order m = memory_order_seq_cst) noexcept;
238    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
239    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
240    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
241    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
242
243    atomic() noexcept = default;
244    constexpr atomic(T* desr) noexcept;
245    atomic(const atomic&) = delete;
246    atomic& operator=(const atomic&) = delete;
247    atomic& operator=(const atomic&) volatile = delete;
248
249    T* operator=(T*) volatile noexcept;
250    T* operator=(T*) noexcept;
251    T* operator++(int) volatile noexcept;
252    T* operator++(int) noexcept;
253    T* operator--(int) volatile noexcept;
254    T* operator--(int) noexcept;
255    T* operator++() volatile noexcept;
256    T* operator++() noexcept;
257    T* operator--() volatile noexcept;
258    T* operator--() noexcept;
259    T* operator+=(ptrdiff_t op) volatile noexcept;
260    T* operator+=(ptrdiff_t op) noexcept;
261    T* operator-=(ptrdiff_t op) volatile noexcept;
262    T* operator-=(ptrdiff_t op) noexcept;
263};
264
265
266template <class T>
267    bool
268    atomic_is_lock_free(const volatile atomic<T>* obj) noexcept;
269
270template <class T>
271    bool
272    atomic_is_lock_free(const atomic<T>* obj) noexcept;
273
274template <class T>
275    void
276    atomic_init(volatile atomic<T>* obj, T desr) noexcept;
277
278template <class T>
279    void
280    atomic_init(atomic<T>* obj, T desr) noexcept;
281
282template <class T>
283    void
284    atomic_store(volatile atomic<T>* obj, T desr) noexcept;
285
286template <class T>
287    void
288    atomic_store(atomic<T>* obj, T desr) noexcept;
289
290template <class T>
291    void
292    atomic_store_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
293
294template <class T>
295    void
296    atomic_store_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
297
298template <class T>
299    T
300    atomic_load(const volatile atomic<T>* obj) noexcept;
301
302template <class T>
303    T
304    atomic_load(const atomic<T>* obj) noexcept;
305
306template <class T>
307    T
308    atomic_load_explicit(const volatile atomic<T>* obj, memory_order m) noexcept;
309
310template <class T>
311    T
312    atomic_load_explicit(const atomic<T>* obj, memory_order m) noexcept;
313
314template <class T>
315    T
316    atomic_exchange(volatile atomic<T>* obj, T desr) noexcept;
317
318template <class T>
319    T
320    atomic_exchange(atomic<T>* obj, T desr) noexcept;
321
322template <class T>
323    T
324    atomic_exchange_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
325
326template <class T>
327    T
328    atomic_exchange_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
329
330template <class T>
331    bool
332    atomic_compare_exchange_weak(volatile atomic<T>* obj, T* expc, T desr) noexcept;
333
334template <class T>
335    bool
336    atomic_compare_exchange_weak(atomic<T>* obj, T* expc, T desr) noexcept;
337
338template <class T>
339    bool
340    atomic_compare_exchange_strong(volatile atomic<T>* obj, T* expc, T desr) noexcept;
341
342template <class T>
343    bool
344    atomic_compare_exchange_strong(atomic<T>* obj, T* expc, T desr) noexcept;
345
346template <class T>
347    bool
348    atomic_compare_exchange_weak_explicit(volatile atomic<T>* obj, T* expc,
349                                          T desr,
350                                          memory_order s, memory_order f) noexcept;
351
352template <class T>
353    bool
354    atomic_compare_exchange_weak_explicit(atomic<T>* obj, T* expc, T desr,
355                                          memory_order s, memory_order f) noexcept;
356
357template <class T>
358    bool
359    atomic_compare_exchange_strong_explicit(volatile atomic<T>* obj,
360                                            T* expc, T desr,
361                                            memory_order s, memory_order f) noexcept;
362
363template <class T>
364    bool
365    atomic_compare_exchange_strong_explicit(atomic<T>* obj, T* expc,
366                                            T desr,
367                                            memory_order s, memory_order f) noexcept;
368
369template <class Integral>
370    Integral
371    atomic_fetch_add(volatile atomic<Integral>* obj, Integral op) noexcept;
372
373template <class Integral>
374    Integral
375    atomic_fetch_add(atomic<Integral>* obj, Integral op) noexcept;
376
377template <class Integral>
378    Integral
379    atomic_fetch_add_explicit(volatile atomic<Integral>* obj, Integral op,
380                              memory_order m) noexcept;
381template <class Integral>
382    Integral
383    atomic_fetch_add_explicit(atomic<Integral>* obj, Integral op,
384                              memory_order m) noexcept;
385template <class Integral>
386    Integral
387    atomic_fetch_sub(volatile atomic<Integral>* obj, Integral op) noexcept;
388
389template <class Integral>
390    Integral
391    atomic_fetch_sub(atomic<Integral>* obj, Integral op) noexcept;
392
393template <class Integral>
394    Integral
395    atomic_fetch_sub_explicit(volatile atomic<Integral>* obj, Integral op,
396                              memory_order m) noexcept;
397template <class Integral>
398    Integral
399    atomic_fetch_sub_explicit(atomic<Integral>* obj, Integral op,
400                              memory_order m) noexcept;
401template <class Integral>
402    Integral
403    atomic_fetch_and(volatile atomic<Integral>* obj, Integral op) noexcept;
404
405template <class Integral>
406    Integral
407    atomic_fetch_and(atomic<Integral>* obj, Integral op) noexcept;
408
409template <class Integral>
410    Integral
411    atomic_fetch_and_explicit(volatile atomic<Integral>* obj, Integral op,
412                              memory_order m) noexcept;
413template <class Integral>
414    Integral
415    atomic_fetch_and_explicit(atomic<Integral>* obj, Integral op,
416                              memory_order m) noexcept;
417template <class Integral>
418    Integral
419    atomic_fetch_or(volatile atomic<Integral>* obj, Integral op) noexcept;
420
421template <class Integral>
422    Integral
423    atomic_fetch_or(atomic<Integral>* obj, Integral op) noexcept;
424
425template <class Integral>
426    Integral
427    atomic_fetch_or_explicit(volatile atomic<Integral>* obj, Integral op,
428                             memory_order m) noexcept;
429template <class Integral>
430    Integral
431    atomic_fetch_or_explicit(atomic<Integral>* obj, Integral op,
432                             memory_order m) noexcept;
433template <class Integral>
434    Integral
435    atomic_fetch_xor(volatile atomic<Integral>* obj, Integral op) noexcept;
436
437template <class Integral>
438    Integral
439    atomic_fetch_xor(atomic<Integral>* obj, Integral op) noexcept;
440
441template <class Integral>
442    Integral
443    atomic_fetch_xor_explicit(volatile atomic<Integral>* obj, Integral op,
444                              memory_order m) noexcept;
445template <class Integral>
446    Integral
447    atomic_fetch_xor_explicit(atomic<Integral>* obj, Integral op,
448                              memory_order m) noexcept;
449
450template <class T>
451    T*
452    atomic_fetch_add(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
453
454template <class T>
455    T*
456    atomic_fetch_add(atomic<T*>* obj, ptrdiff_t op) noexcept;
457
458template <class T>
459    T*
460    atomic_fetch_add_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
461                              memory_order m) noexcept;
462template <class T>
463    T*
464    atomic_fetch_add_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
465
466template <class T>
467    T*
468    atomic_fetch_sub(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
469
470template <class T>
471    T*
472    atomic_fetch_sub(atomic<T*>* obj, ptrdiff_t op) noexcept;
473
474template <class T>
475    T*
476    atomic_fetch_sub_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
477                              memory_order m) noexcept;
478template <class T>
479    T*
480    atomic_fetch_sub_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
481
482// Atomics for standard typedef types
483
484typedef atomic<bool>               atomic_bool;
485typedef atomic<char>               atomic_char;
486typedef atomic<signed char>        atomic_schar;
487typedef atomic<unsigned char>      atomic_uchar;
488typedef atomic<short>              atomic_short;
489typedef atomic<unsigned short>     atomic_ushort;
490typedef atomic<int>                atomic_int;
491typedef atomic<unsigned int>       atomic_uint;
492typedef atomic<long>               atomic_long;
493typedef atomic<unsigned long>      atomic_ulong;
494typedef atomic<long long>          atomic_llong;
495typedef atomic<unsigned long long> atomic_ullong;
496typedef atomic<char16_t>           atomic_char16_t;
497typedef atomic<char32_t>           atomic_char32_t;
498typedef atomic<wchar_t>            atomic_wchar_t;
499
500typedef atomic<int_least8_t>   atomic_int_least8_t;
501typedef atomic<uint_least8_t>  atomic_uint_least8_t;
502typedef atomic<int_least16_t>  atomic_int_least16_t;
503typedef atomic<uint_least16_t> atomic_uint_least16_t;
504typedef atomic<int_least32_t>  atomic_int_least32_t;
505typedef atomic<uint_least32_t> atomic_uint_least32_t;
506typedef atomic<int_least64_t>  atomic_int_least64_t;
507typedef atomic<uint_least64_t> atomic_uint_least64_t;
508
509typedef atomic<int_fast8_t>   atomic_int_fast8_t;
510typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
511typedef atomic<int_fast16_t>  atomic_int_fast16_t;
512typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
513typedef atomic<int_fast32_t>  atomic_int_fast32_t;
514typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
515typedef atomic<int_fast64_t>  atomic_int_fast64_t;
516typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
517
518typedef atomic<int8_t>   atomic_int8_t;
519typedef atomic<uint8_t>  atomic_uint8_t;
520typedef atomic<int16_t>  atomic_int16_t;
521typedef atomic<uint16_t> atomic_uint16_t;
522typedef atomic<int32_t>  atomic_int32_t;
523typedef atomic<uint32_t> atomic_uint32_t;
524typedef atomic<int64_t>  atomic_int64_t;
525typedef atomic<uint64_t> atomic_uint64_t;
526
527typedef atomic<intptr_t>  atomic_intptr_t;
528typedef atomic<uintptr_t> atomic_uintptr_t;
529typedef atomic<size_t>    atomic_size_t;
530typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
531typedef atomic<intmax_t>  atomic_intmax_t;
532typedef atomic<uintmax_t> atomic_uintmax_t;
533
534// fences
535
536void atomic_thread_fence(memory_order m) noexcept;
537void atomic_signal_fence(memory_order m) noexcept;
538
539}  // std
540
541*/
542
543#include <__config>
544#include <cstddef>
545#include <cstdint>
546#include <type_traits>
547#include <version>
548
#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
#pragma GCC system_header
#endif

// <atomic> requires threading support and one of the two known atomic
// implementations (the C11 _Atomic implementation or the GCC __atomic_*
// builtins); refuse to compile otherwise.
#ifdef _LIBCPP_HAS_NO_THREADS
#error <atomic> is not supported on this single threaded system
#endif
#if !defined(_LIBCPP_HAS_C_ATOMIC_IMP) && !defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)
#error <atomic> is not implemented
#endif
// C11 <stdatomic.h> defines kill_dependency as a macro, which would break
// the std::kill_dependency declared by this header.
#ifdef kill_dependency
#error C++ standard library is incompatible with <stdatomic.h>
#endif

// Compile-time diagnostics for memory_order arguments the standard forbids
// for a given operation: stores may not use consume/acquire/acq_rel, loads
// may not use release/acq_rel, and a compare-exchange failure ordering may
// not use release/acq_rel.  Note __m is unused in the exchange check; only
// the failure ordering __f is diagnosed here.
#define _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) \
  _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_consume || \
                           __m == memory_order_acquire || \
                           __m == memory_order_acq_rel,   \
                        "memory order argument to atomic operation is invalid")

#define _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) \
  _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_release || \
                           __m == memory_order_acq_rel,   \
                        "memory order argument to atomic operation is invalid")

#define _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__m, __f) \
  _LIBCPP_DIAGNOSE_WARNING(__f == memory_order_release || \
                           __f == memory_order_acq_rel,   \
                        "memory order argument to atomic operation is invalid")
578
579_LIBCPP_BEGIN_NAMESPACE_STD
580
// The six memory orderings of C++11 [atomics.order], spelled with the
// "typedef enum" form so the name works identically in C++03 and C++11
// dialects.  Enumerator values follow declaration order (0 through 5).
typedef enum memory_order
{
    memory_order_relaxed,
    memory_order_consume,
    memory_order_acquire,
    memory_order_release,
    memory_order_acq_rel,
    memory_order_seq_cst
} memory_order;
586
587#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)
588namespace __gcc_atomic {
// Storage wrapper that emulates the C11 _Atomic(T) type for the GCC
// __atomic_* builtin implementation; the builtins operate directly on the
// contained __a_value member.
template <typename _Tp>
struct __gcc_atomic_t {

// GCC implements is_trivially_copyable starting with 5.1; older GCCs
// cannot enforce this [atomics.types.generic] requirement.
#if _GNUC_VER >= 501
    static_assert(is_trivially_copyable<_Tp>::value,
      "std::atomic<Tp> requires that 'Tp' be a trivially copyable type");
#endif

  _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_CXX03_LANG
    __gcc_atomic_t() _NOEXCEPT = default;
#else
    // C++03 has no "= default"; value-initialize the member instead.
    __gcc_atomic_t() _NOEXCEPT : __a_value() {}
#endif // _LIBCPP_CXX03_LANG
  _LIBCPP_CONSTEXPR explicit __gcc_atomic_t(_Tp value) _NOEXCEPT
    : __a_value(value) {}
  _Tp __a_value;
};
// Spell _Atomic(x) the same way the C11 implementation path does, so the
// __c11_atomic_* wrappers below are written against a single name.
#define _Atomic(x) __gcc_atomic::__gcc_atomic_t<x>
608
// Declared-only helper: yields a _Td in unevaluated (sizeof) contexts.
template <typename _Tp> _Tp __create();

// SFINAE probe: the int overload participates in overload resolution only
// if "_Tp()->__a_value = __create<_Td>()" is well formed, i.e. a _Td can
// be assigned into the atomic's storage through a pointer of type _Tp.
// The variadic overload is the always-viable fallback.
template <typename _Tp, typename _Td>
typename enable_if<sizeof(_Tp()->__a_value = __create<_Td>()), char>::type
    __test_atomic_assignable(int);
template <typename _Tp, typename _Up>
__two __test_atomic_assignable(...);

// value is true when a _Td is assignable to the __a_value member reached
// through a (possibly volatile-qualified) _Tp pointer; used below to pick
// the correct __c11_atomic_init implementation.
template <typename _Tp, typename _Td>
struct __can_assign {
  static const bool value =
      sizeof(__test_atomic_assignable<_Tp, _Td>(1)) == sizeof(char);
};
622
623static inline _LIBCPP_CONSTEXPR int __to_gcc_order(memory_order __order) {
624  // Avoid switch statement to make this a constexpr.
625  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
626         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
627          (__order == memory_order_release ? __ATOMIC_RELEASE:
628           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
629            (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL:
630              __ATOMIC_CONSUME))));
631}
632
633static inline _LIBCPP_CONSTEXPR int __to_gcc_failure_order(memory_order __order) {
634  // Avoid switch statement to make this a constexpr.
635  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
636         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
637          (__order == memory_order_release ? __ATOMIC_RELAXED:
638           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
639            (__order == memory_order_acq_rel ? __ATOMIC_ACQUIRE:
640              __ATOMIC_CONSUME))));
641}
642
643} // namespace __gcc_atomic
644
// Non-atomic initialization, enabled when _Tp's assignment can be applied
// through a pointer to volatile storage: a plain assignment suffices.
template <typename _Tp>
static inline
typename enable_if<
    __gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value>::type
__c11_atomic_init(volatile _Atomic(_Tp)* __a,  _Tp __val) {
  __a->__a_value = __val;
}
652
653template <typename _Tp>
654static inline
655typename enable_if<
656    !__gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value &&
657     __gcc_atomic::__can_assign<         _Atomic(_Tp)*, _Tp>::value>::type
658__c11_atomic_init(volatile _Atomic(_Tp)* __a,  _Tp __val) {
659  // [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
660  // the default operator= in an object is not volatile, a byte-by-byte copy
661  // is required.
662  volatile char* to = reinterpret_cast<volatile char*>(&__a->__a_value);
663  volatile char* end = to + sizeof(_Tp);
664  char* from = reinterpret_cast<char*>(&__val);
665  while (to != end) {
666    *to++ = *from++;
667  }
668}
669
// Non-volatile initialization: an ordinary assignment always suffices.
template <typename _Tp>
static inline void __c11_atomic_init(_Atomic(_Tp)* __a,  _Tp __val) {
  __a->__a_value = __val;
}
674
// Thread fence: forwards directly to the GCC builtin with the translated
// ordering.
static inline void __c11_atomic_thread_fence(memory_order __order) {
  __atomic_thread_fence(__gcc_atomic::__to_gcc_order(__order));
}

// Signal fence: orders operations only with respect to a signal handler
// executed in the same thread.
static inline void __c11_atomic_signal_fence(memory_order __order) {
  __atomic_signal_fence(__gcc_atomic::__to_gcc_order(__order));
}
682
683template <typename _Tp>
684static inline void __c11_atomic_store(volatile _Atomic(_Tp)* __a,  _Tp __val,
685                                      memory_order __order) {
686  return __atomic_store(&__a->__a_value, &__val,
687                        __gcc_atomic::__to_gcc_order(__order));
688}
689
// Atomic store (non-volatile overload) via the generic __atomic_store
// builtin; the value is passed by address, as the builtin requires.
template <typename _Tp>
static inline void __c11_atomic_store(_Atomic(_Tp)* __a,  _Tp __val,
                                      memory_order __order) {
  __atomic_store(&__a->__a_value, &__val,
                 __gcc_atomic::__to_gcc_order(__order));
}
696
// Atomic load (volatile overload).  The generic __atomic_load builtin
// writes the current value through an out-pointer rather than returning
// it, hence the local temporary.
template <typename _Tp>
static inline _Tp __c11_atomic_load(const volatile _Atomic(_Tp)* __a,
                                    memory_order __order) {
  _Tp __ret;
  __atomic_load(&__a->__a_value, &__ret,
                __gcc_atomic::__to_gcc_order(__order));
  return __ret;
}

// Atomic load, non-volatile overload of the above.
template <typename _Tp>
static inline _Tp __c11_atomic_load(const _Atomic(_Tp)* __a, memory_order __order) {
  _Tp __ret;
  __atomic_load(&__a->__a_value, &__ret,
                __gcc_atomic::__to_gcc_order(__order));
  return __ret;
}
713
// Atomic exchange (volatile overload).  The generic __atomic_exchange
// builtin takes the new value and delivers the previous value through
// out-pointers rather than by return.
template <typename _Tp>
static inline _Tp __c11_atomic_exchange(volatile _Atomic(_Tp)* __a,
                                        _Tp __value, memory_order __order) {
  _Tp __ret;
  __atomic_exchange(&__a->__a_value, &__value, &__ret,
                    __gcc_atomic::__to_gcc_order(__order));
  return __ret;
}

// Atomic exchange, non-volatile overload of the above.
template <typename _Tp>
static inline _Tp __c11_atomic_exchange(_Atomic(_Tp)* __a, _Tp __value,
                                        memory_order __order) {
  _Tp __ret;
  __atomic_exchange(&__a->__a_value, &__value, &__ret,
                    __gcc_atomic::__to_gcc_order(__order));
  return __ret;
}
731
// Strong compare-exchange (volatile overload).  The 'false' fourth
// argument selects the strong (non-spurious) form of the builtin.  On
// failure the builtin writes the observed value back into *__expected.
// The failure ordering goes through __to_gcc_failure_order, which strips
// any release component.
template <typename _Tp>
static inline bool __c11_atomic_compare_exchange_strong(
    volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
    memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   false,
                                   __gcc_atomic::__to_gcc_order(__success),
                                   __gcc_atomic::__to_gcc_failure_order(__failure));
}

// Strong compare-exchange, non-volatile overload of the above.
template <typename _Tp>
static inline bool __c11_atomic_compare_exchange_strong(
    _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   false,
                                   __gcc_atomic::__to_gcc_order(__success),
                                   __gcc_atomic::__to_gcc_failure_order(__failure));
}
751
// Weak compare-exchange (volatile overload).  The 'true' fourth argument
// selects the weak form of the builtin, which is permitted to fail
// spuriously.  On failure the observed value is written into *__expected;
// the failure ordering is sanitized by __to_gcc_failure_order.
template <typename _Tp>
static inline bool __c11_atomic_compare_exchange_weak(
    volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
    memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   true,
                                   __gcc_atomic::__to_gcc_order(__success),
                                   __gcc_atomic::__to_gcc_failure_order(__failure));
}

// Weak compare-exchange, non-volatile overload of the above.
template <typename _Tp>
static inline bool __c11_atomic_compare_exchange_weak(
    _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   true,
                                   __gcc_atomic::__to_gcc_order(__success),
                                   __gcc_atomic::__to_gcc_failure_order(__failure));
}
771
// Scale factor applied to fetch_add/fetch_sub deltas: 1 for non-pointer
// types, sizeof(_Tp) for _Tp* so that atomic pointer arithmetic advances
// by whole objects (the __atomic builtins themselves do not scale).
template <typename _Tp>
struct __skip_amt { enum {value = 1}; };

template <typename _Tp>
struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; };

// FIXME: Haven't figured out what the spec says about using arrays with
// atomic_fetch_add. Force a failure rather than creating bad behavior.
// (These specializations deliberately have no 'value' member, so any use
// fails to compile.)
template <typename _Tp>
struct __skip_amt<_Tp[]> { };
template <typename _Tp, int n>
struct __skip_amt<_Tp[n]> { };
784
// Atomic fetch-add (volatile overload); returns the previous value.
// __delta is scaled by __skip_amt so atomic<T*> advances by __delta
// objects rather than __delta bytes.
template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_add(volatile _Atomic(_Tp)* __a,
                                         _Td __delta, memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}

// Atomic fetch-add, non-volatile overload of the above.
template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_add(_Atomic(_Tp)* __a, _Td __delta,
                                         memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}
798
799template <typename _Tp, typename _Td>
800static inline _Tp __c11_atomic_fetch_sub(volatile _Atomic(_Tp)* __a,
801                                         _Td __delta, memory_order __order) {
802  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
803                            __gcc_atomic::__to_gcc_order(__order));
804}
805
806template <typename _Tp, typename _Td>
807static inline _Tp __c11_atomic_fetch_sub(_Atomic(_Tp)* __a, _Td __delta,
808                                         memory_order __order) {
809  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
810                            __gcc_atomic::__to_gcc_order(__order));
811}
812
813template <typename _Tp>
814static inline _Tp __c11_atomic_fetch_and(volatile _Atomic(_Tp)* __a,
815                                         _Tp __pattern, memory_order __order) {
816  return __atomic_fetch_and(&__a->__a_value, __pattern,
817                            __gcc_atomic::__to_gcc_order(__order));
818}
819
820template <typename _Tp>
821static inline _Tp __c11_atomic_fetch_and(_Atomic(_Tp)* __a,
822                                         _Tp __pattern, memory_order __order) {
823  return __atomic_fetch_and(&__a->__a_value, __pattern,
824                            __gcc_atomic::__to_gcc_order(__order));
825}
826
827template <typename _Tp>
828static inline _Tp __c11_atomic_fetch_or(volatile _Atomic(_Tp)* __a,
829                                        _Tp __pattern, memory_order __order) {
830  return __atomic_fetch_or(&__a->__a_value, __pattern,
831                           __gcc_atomic::__to_gcc_order(__order));
832}
833
834template <typename _Tp>
835static inline _Tp __c11_atomic_fetch_or(_Atomic(_Tp)* __a, _Tp __pattern,
836                                        memory_order __order) {
837  return __atomic_fetch_or(&__a->__a_value, __pattern,
838                           __gcc_atomic::__to_gcc_order(__order));
839}
840
841template <typename _Tp>
842static inline _Tp __c11_atomic_fetch_xor(volatile _Atomic(_Tp)* __a,
843                                         _Tp __pattern, memory_order __order) {
844  return __atomic_fetch_xor(&__a->__a_value, __pattern,
845                            __gcc_atomic::__to_gcc_order(__order));
846}
847
848template <typename _Tp>
849static inline _Tp __c11_atomic_fetch_xor(_Atomic(_Tp)* __a, _Tp __pattern,
850                                         memory_order __order) {
851  return __atomic_fetch_xor(&__a->__a_value, __pattern,
852                            __gcc_atomic::__to_gcc_order(__order));
853}
854#endif // _LIBCPP_HAS_GCC_ATOMIC_IMP
855
856template <class _Tp>
857inline _LIBCPP_INLINE_VISIBILITY
858_Tp
859kill_dependency(_Tp __y) _NOEXCEPT
860{
861    return __y;
862}
863
864#if defined(__CLANG_ATOMIC_BOOL_LOCK_FREE)
865# define ATOMIC_BOOL_LOCK_FREE      __CLANG_ATOMIC_BOOL_LOCK_FREE
866# define ATOMIC_CHAR_LOCK_FREE      __CLANG_ATOMIC_CHAR_LOCK_FREE
867# define ATOMIC_CHAR16_T_LOCK_FREE  __CLANG_ATOMIC_CHAR16_T_LOCK_FREE
868# define ATOMIC_CHAR32_T_LOCK_FREE  __CLANG_ATOMIC_CHAR32_T_LOCK_FREE
869# define ATOMIC_WCHAR_T_LOCK_FREE   __CLANG_ATOMIC_WCHAR_T_LOCK_FREE
870# define ATOMIC_SHORT_LOCK_FREE     __CLANG_ATOMIC_SHORT_LOCK_FREE
871# define ATOMIC_INT_LOCK_FREE       __CLANG_ATOMIC_INT_LOCK_FREE
872# define ATOMIC_LONG_LOCK_FREE      __CLANG_ATOMIC_LONG_LOCK_FREE
873# define ATOMIC_LLONG_LOCK_FREE     __CLANG_ATOMIC_LLONG_LOCK_FREE
874# define ATOMIC_POINTER_LOCK_FREE   __CLANG_ATOMIC_POINTER_LOCK_FREE
875#else
876# define ATOMIC_BOOL_LOCK_FREE      __GCC_ATOMIC_BOOL_LOCK_FREE
877# define ATOMIC_CHAR_LOCK_FREE      __GCC_ATOMIC_CHAR_LOCK_FREE
878# define ATOMIC_CHAR16_T_LOCK_FREE  __GCC_ATOMIC_CHAR16_T_LOCK_FREE
879# define ATOMIC_CHAR32_T_LOCK_FREE  __GCC_ATOMIC_CHAR32_T_LOCK_FREE
880# define ATOMIC_WCHAR_T_LOCK_FREE   __GCC_ATOMIC_WCHAR_T_LOCK_FREE
881# define ATOMIC_SHORT_LOCK_FREE     __GCC_ATOMIC_SHORT_LOCK_FREE
882# define ATOMIC_INT_LOCK_FREE       __GCC_ATOMIC_INT_LOCK_FREE
883# define ATOMIC_LONG_LOCK_FREE      __GCC_ATOMIC_LONG_LOCK_FREE
884# define ATOMIC_LLONG_LOCK_FREE     __GCC_ATOMIC_LLONG_LOCK_FREE
885# define ATOMIC_POINTER_LOCK_FREE   __GCC_ATOMIC_POINTER_LOCK_FREE
886#endif
887
888// general atomic<T>
889
// Base layer for atomic<T>: holds a single _Atomic(_Tp) value and provides
// the operations common to all atomic types (load, store, exchange,
// compare-exchange), each in volatile and non-volatile flavors.  The bool
// template parameter — defaulted from the type traits — routes integral
// types other than bool to the arithmetic specialization below.
template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
struct __atomic_base  // false
{
    // mutable so const member functions (load, operator _Tp) can pass the
    // address of the representation to the __c11_atomic_* helpers, which
    // take a pointer to non-const.
    mutable _Atomic(_Tp) __a_;

#if defined(__cpp_lib_atomic_is_always_lock_free)
  // P0152R1 is_always_lock_free: compile-time constant computed by the
  // __atomic_always_lock_free builtin from this object's size.
  static _LIBCPP_CONSTEXPR bool is_always_lock_free = __atomic_always_lock_free(sizeof(__a_), 0);
#endif

    // Runtime lock-free query; dispatches on which atomic implementation
    // (clang C11 builtins vs. GCC __atomic builtins) this header selected.
    _LIBCPP_INLINE_VISIBILITY
    bool is_lock_free() const volatile _NOEXCEPT
    {
#if defined(_LIBCPP_HAS_C_ATOMIC_IMP)
    return __c11_atomic_is_lock_free(sizeof(_Tp));
#else
    return __atomic_is_lock_free(sizeof(_Tp), 0);
#endif
    }
    _LIBCPP_INLINE_VISIBILITY
    bool is_lock_free() const _NOEXCEPT
        {return static_cast<__atomic_base const volatile*>(this)->is_lock_free();}
    // store/load: the _LIBCPP_CHECK_*_MEMORY_ORDER attributes diagnose
    // memory orders invalid for the operation, where supported.
    _LIBCPP_INLINE_VISIBILITY
    void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
      _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
        {__c11_atomic_store(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
      _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
        {__c11_atomic_store(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
      _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
        {return __c11_atomic_load(&__a_, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
      _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
        {return __c11_atomic_load(&__a_, __m);}
    // Implicit conversion is a seq_cst load.
    _LIBCPP_INLINE_VISIBILITY
    operator _Tp() const volatile _NOEXCEPT {return load();}
    _LIBCPP_INLINE_VISIBILITY
    operator _Tp() const _NOEXCEPT          {return load();}
    _LIBCPP_INLINE_VISIBILITY
    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, __d, __m);}
    // compare-exchange family: on failure, __e is updated with the value
    // observed.  The two-order overloads take separate success/failure
    // orders; the single-order overloads pass __m for both.
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __s, memory_order __f) volatile _NOEXCEPT
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __s, memory_order __f) _NOEXCEPT
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __s, memory_order __f) volatile _NOEXCEPT
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __s, memory_order __f) _NOEXCEPT
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}

    // Default construction leaves the value uninitialized in C++11 mode
    // (= default); the C++03 emulation value-initializes instead.
    _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_CXX03_LANG
    __atomic_base() _NOEXCEPT = default;
#else
    __atomic_base() _NOEXCEPT : __a_() {}
#endif // _LIBCPP_CXX03_LANG

    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}
    // Atomics are neither copyable nor copy-assignable; in C++03 this is
    // emulated with private, undefined members.
#ifndef _LIBCPP_CXX03_LANG
    __atomic_base(const __atomic_base&) = delete;
    __atomic_base& operator=(const __atomic_base&) = delete;
    __atomic_base& operator=(const __atomic_base&) volatile = delete;
#else
private:
    __atomic_base(const __atomic_base&);
    __atomic_base& operator=(const __atomic_base&);
    __atomic_base& operator=(const __atomic_base&) volatile;
#endif
};
994
#if defined(__cpp_lib_atomic_is_always_lock_free)
// Out-of-class definition of the static constexpr member, required
// (pre-C++17 inline variables) whenever it is odr-used.
template <class _Tp, bool __b>
_LIBCPP_CONSTEXPR bool __atomic_base<_Tp, __b>::is_always_lock_free;
#endif
999
1000// atomic<Integral>
1001
// Specialization for integral types other than bool: layers the arithmetic
// and bitwise fetch operations plus the corresponding operators on top of
// the generic base.
template <class _Tp>
struct __atomic_base<_Tp, true>
    : public __atomic_base<_Tp, false>
{
    typedef __atomic_base<_Tp, false> __base;
    _LIBCPP_INLINE_VISIBILITY
    __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}

    // fetch_* atomically apply the operation and return the value held
    // immediately before it.
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}

    // Operators are defined in terms of fetch_*: post-inc/dec return the
    // old value; pre-inc/dec and the compound assignments return the new
    // value by reapplying the operation to the fetched result.
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++(int) volatile _NOEXCEPT      {return fetch_add(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++(int) _NOEXCEPT               {return fetch_add(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--(int) volatile _NOEXCEPT      {return fetch_sub(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--(int) _NOEXCEPT               {return fetch_sub(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++() volatile _NOEXCEPT         {return fetch_add(_Tp(1)) + _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++() _NOEXCEPT                  {return fetch_add(_Tp(1)) + _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--() volatile _NOEXCEPT         {return fetch_sub(_Tp(1)) - _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--() _NOEXCEPT                  {return fetch_sub(_Tp(1)) - _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator+=(_Tp __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator-=(_Tp __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator&=(_Tp __op) _NOEXCEPT          {return fetch_and(__op) & __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator|=(_Tp __op) _NOEXCEPT          {return fetch_or(__op) | __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator^=(_Tp __op) _NOEXCEPT          {return fetch_xor(__op) ^ __op;}
};
1080
1081// atomic<T>
1082
// Primary atomic<T> template.  Adds assignment on top of __atomic_base:
// operator= performs a seq_cst store and returns the stored value.
template <class _Tp>
struct atomic
    : public __atomic_base<_Tp>
{
    typedef __atomic_base<_Tp> __base;
    _LIBCPP_INLINE_VISIBILITY
    atomic() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}

    _LIBCPP_INLINE_VISIBILITY
    _Tp operator=(_Tp __d) volatile _NOEXCEPT
        {__base::store(__d); return __d;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator=(_Tp __d) _NOEXCEPT
        {__base::store(__d); return __d;}
};
1100
1101// atomic<T*>
1102
// Partial specialization for pointers.  Adds assignment plus pointer
// arithmetic: fetch_add/fetch_sub take a ptrdiff_t element count (the
// byte scaling happens in the __c11_atomic_fetch_* layer via __skip_amt
// on the GCC path, or natively on the clang path).
template <class _Tp>
struct atomic<_Tp*>
    : public __atomic_base<_Tp*>
{
    typedef __atomic_base<_Tp*> __base;
    _LIBCPP_INLINE_VISIBILITY
    atomic() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}

    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
        {__base::store(__d); return __d;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator=(_Tp* __d) _NOEXCEPT
        {__base::store(__d); return __d;}

    // fetch_* return the pointer value held before the operation.
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
                                                                        volatile _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
                                                                        volatile _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}

    // Operators mirror the integral specialization: post-forms return the
    // old pointer, pre-forms and compound assignments return the new one.
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++(int) volatile _NOEXCEPT            {return fetch_add(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++(int) _NOEXCEPT                     {return fetch_add(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--(int) volatile _NOEXCEPT            {return fetch_sub(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--(int) _NOEXCEPT                     {return fetch_sub(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++() volatile _NOEXCEPT               {return fetch_add(1) + 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++() _NOEXCEPT                        {return fetch_add(1) + 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--() volatile _NOEXCEPT               {return fetch_sub(1) - 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--() _NOEXCEPT                        {return fetch_sub(1) - 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
};
1160
1161// atomic_is_lock_free
1162
1163template <class _Tp>
1164inline _LIBCPP_INLINE_VISIBILITY
1165bool
1166atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
1167{
1168    return __o->is_lock_free();
1169}
1170
1171template <class _Tp>
1172inline _LIBCPP_INLINE_VISIBILITY
1173bool
1174atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
1175{
1176    return __o->is_lock_free();
1177}
1178
1179// atomic_init
1180
// Non-atomically initializes *__o to __d by writing directly to the
// representation.  Not an atomic operation: concurrent access during
// initialization is a data race.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_init(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __c11_atomic_init(&__o->__a_, __d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_init(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __c11_atomic_init(&__o->__a_, __d);
}
1196
1197// atomic_store
1198
1199template <class _Tp>
1200inline _LIBCPP_INLINE_VISIBILITY
1201void
1202atomic_store(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1203{
1204    __o->store(__d);
1205}
1206
1207template <class _Tp>
1208inline _LIBCPP_INLINE_VISIBILITY
1209void
1210atomic_store(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1211{
1212    __o->store(__d);
1213}
1214
1215// atomic_store_explicit
1216
// Store with caller-supplied ordering.  _LIBCPP_CHECK_STORE_MEMORY_ORDER
// diagnoses memory orders that are invalid for a store, where the compiler
// supports such checking.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
{
    __o->store(__d, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
{
    __o->store(__d, __m);
}
1234
1235// atomic_load
1236
1237template <class _Tp>
1238inline _LIBCPP_INLINE_VISIBILITY
1239_Tp
1240atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
1241{
1242    return __o->load();
1243}
1244
1245template <class _Tp>
1246inline _LIBCPP_INLINE_VISIBILITY
1247_Tp
1248atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
1249{
1250    return __o->load();
1251}
1252
1253// atomic_load_explicit
1254
// Load with caller-supplied ordering.  _LIBCPP_CHECK_LOAD_MEMORY_ORDER
// diagnoses memory orders that are invalid for a load, where supported.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
{
    return __o->load(__m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
{
    return __o->load(__m);
}
1272
1273// atomic_exchange
1274
1275template <class _Tp>
1276inline _LIBCPP_INLINE_VISIBILITY
1277_Tp
1278atomic_exchange(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1279{
1280    return __o->exchange(__d);
1281}
1282
1283template <class _Tp>
1284inline _LIBCPP_INLINE_VISIBILITY
1285_Tp
1286atomic_exchange(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1287{
1288    return __o->exchange(__d);
1289}
1290
1291// atomic_exchange_explicit
1292
1293template <class _Tp>
1294inline _LIBCPP_INLINE_VISIBILITY
1295_Tp
1296atomic_exchange_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1297{
1298    return __o->exchange(__d, __m);
1299}
1300
1301template <class _Tp>
1302inline _LIBCPP_INLINE_VISIBILITY
1303_Tp
1304atomic_exchange_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1305{
1306    return __o->exchange(__d, __m);
1307}
1308
1309// atomic_compare_exchange_weak
1310
1311template <class _Tp>
1312inline _LIBCPP_INLINE_VISIBILITY
1313bool
1314atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1315{
1316    return __o->compare_exchange_weak(*__e, __d);
1317}
1318
1319template <class _Tp>
1320inline _LIBCPP_INLINE_VISIBILITY
1321bool
1322atomic_compare_exchange_weak(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1323{
1324    return __o->compare_exchange_weak(*__e, __d);
1325}
1326
1327// atomic_compare_exchange_strong
1328
1329template <class _Tp>
1330inline _LIBCPP_INLINE_VISIBILITY
1331bool
1332atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1333{
1334    return __o->compare_exchange_strong(*__e, __d);
1335}
1336
1337template <class _Tp>
1338inline _LIBCPP_INLINE_VISIBILITY
1339bool
1340atomic_compare_exchange_strong(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1341{
1342    return __o->compare_exchange_strong(*__e, __d);
1343}
1344
1345// atomic_compare_exchange_weak_explicit
1346
// Weak CAS with separate success/failure orderings.  On failure, *__e
// receives the observed value.  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER
// diagnoses invalid order pairs, where supported.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, _Tp* __e,
                                      _Tp __d,
                                      memory_order __s, memory_order __f) _NOEXCEPT
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
{
    return __o->compare_exchange_weak(*__e, __d, __s, __f);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, _Tp* __e, _Tp __d,
                                      memory_order __s, memory_order __f) _NOEXCEPT
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
{
    return __o->compare_exchange_weak(*__e, __d, __s, __f);
}
1367
1368// atomic_compare_exchange_strong_explicit
1369
// Strong CAS with separate success/failure orderings.  On failure, *__e
// receives the observed value.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
                                        _Tp* __e, _Tp __d,
                                        memory_order __s, memory_order __f) _NOEXCEPT
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
{
    return __o->compare_exchange_strong(*__e, __d, __s, __f);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, _Tp* __e,
                                        _Tp __d,
                                        memory_order __s, memory_order __f) _NOEXCEPT
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
{
    return __o->compare_exchange_strong(*__e, __d, __s, __f);
}
1391
1392// atomic_fetch_add
1393
// Non-member fetch_add.  The enable_if constraint restricts the arithmetic
// overloads to integral types other than bool (mirroring the member set of
// __atomic_base<_Tp, true>); separate overloads handle atomic<_Tp*> with a
// ptrdiff_t element count.  Each returns the value held before the add.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}
1433
1434// atomic_fetch_add_explicit
1435
// fetch_add with caller-supplied ordering; same overload set and
// constraints as atomic_fetch_add above.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
                          memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}
1476
1477// atomic_fetch_sub
1478
// Non-member fetch_sub; same overload set and constraints as
// atomic_fetch_add.  Returns the value held before the subtraction.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}
1518
1519// atomic_fetch_sub_explicit
1520
// fetch_sub with caller-supplied ordering; same overload set and
// constraints as atomic_fetch_sub above.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
                          memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}
1561
1562// atomic_fetch_and
1563
// Non-member fetch_and: bitwise AND, integral non-bool types only (no
// pointer overloads for the bitwise operations).  Returns the prior value.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_and(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_and(__op);
}

// atomic_fetch_and_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_and(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_and(__op, __m);
}
1613
1614// atomic_fetch_or
1615
// Non-member fetch_or: bitwise OR, integral non-bool types only.
// Returns the prior value.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_or(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_or(__op);
}

// atomic_fetch_or_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_or(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_or(__op, __m);
}
1665
1666// atomic_fetch_xor
1667
// Non-member fetch_xor: bitwise XOR, integral non-bool types only.
// Returns the prior value.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_xor(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_xor(__op);
}

// atomic_fetch_xor_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_xor(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_xor(__op, __m);
}
1717
1718// flag type and operations
1719
// atomic_flag: the minimal atomic boolean flag type.  Implemented
// directly on Clang's C11 _Atomic builtin operations, so the only data
// member is the raw _Atomic(bool) storage.
typedef struct atomic_flag
{
    _Atomic(bool) __a_;  // flag storage; true means "set"

    // Atomically set the flag to true and return its previous value.
    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, true, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, true, __m);}
    // Atomically reset the flag to false.
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {__c11_atomic_store(&__a_, false, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {__c11_atomic_store(&__a_, false, __m);}

    _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_CXX03_LANG
    atomic_flag() _NOEXCEPT = default;
#else
    // C++03 has no "= default": value-initialize the flag by hand.
    atomic_flag() _NOEXCEPT : __a_() {}
#endif // _LIBCPP_CXX03_LANG

    // libc++ extension: construct with an initial value, which lets
    // ATOMIC_FLAG_INIT ({false}) work as a braced initializer.
    _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR
    atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {} // EXTENSION

    // atomic_flag is neither copyable nor assignable.
#ifndef _LIBCPP_CXX03_LANG
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;
#else
    // C++03 emulation of "= delete": declared private, never defined.
private:
    atomic_flag(const atomic_flag&);
    atomic_flag& operator=(const atomic_flag&);
    atomic_flag& operator=(const atomic_flag&) volatile;
#endif
} atomic_flag;
1758
1759inline _LIBCPP_INLINE_VISIBILITY
1760bool
1761atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
1762{
1763    return __o->test_and_set();
1764}
1765
1766inline _LIBCPP_INLINE_VISIBILITY
1767bool
1768atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
1769{
1770    return __o->test_and_set();
1771}
1772
1773inline _LIBCPP_INLINE_VISIBILITY
1774bool
1775atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
1776{
1777    return __o->test_and_set(__m);
1778}
1779
1780inline _LIBCPP_INLINE_VISIBILITY
1781bool
1782atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
1783{
1784    return __o->test_and_set(__m);
1785}
1786
1787inline _LIBCPP_INLINE_VISIBILITY
1788void
1789atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
1790{
1791    __o->clear();
1792}
1793
1794inline _LIBCPP_INLINE_VISIBILITY
1795void
1796atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
1797{
1798    __o->clear();
1799}
1800
1801inline _LIBCPP_INLINE_VISIBILITY
1802void
1803atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
1804{
1805    __o->clear(__m);
1806}
1807
1808inline _LIBCPP_INLINE_VISIBILITY
1809void
1810atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
1811{
1812    __o->clear(__m);
1813}
1814
1815// fences
1816
1817inline _LIBCPP_INLINE_VISIBILITY
1818void
1819atomic_thread_fence(memory_order __m) _NOEXCEPT
1820{
1821    __c11_atomic_thread_fence(__m);
1822}
1823
1824inline _LIBCPP_INLINE_VISIBILITY
1825void
1826atomic_signal_fence(memory_order __m) _NOEXCEPT
1827{
1828    __c11_atomic_signal_fence(__m);
1829}
1830
// Atomics for standard typedef types
//
// One atomic_X alias for the atomic<T> specialization of each standard
// integer and character type, as listed in the <atomic> synopsis.

typedef atomic<bool>               atomic_bool;
typedef atomic<char>               atomic_char;
typedef atomic<signed char>        atomic_schar;
typedef atomic<unsigned char>      atomic_uchar;
typedef atomic<short>              atomic_short;
typedef atomic<unsigned short>     atomic_ushort;
typedef atomic<int>                atomic_int;
typedef atomic<unsigned int>       atomic_uint;
typedef atomic<long>               atomic_long;
typedef atomic<unsigned long>      atomic_ulong;
typedef atomic<long long>          atomic_llong;
typedef atomic<unsigned long long> atomic_ullong;
typedef atomic<char16_t>           atomic_char16_t;
typedef atomic<char32_t>           atomic_char32_t;
typedef atomic<wchar_t>            atomic_wchar_t;

// Aliases for the <cstdint> least-width integer typedefs.
typedef atomic<int_least8_t>   atomic_int_least8_t;
typedef atomic<uint_least8_t>  atomic_uint_least8_t;
typedef atomic<int_least16_t>  atomic_int_least16_t;
typedef atomic<uint_least16_t> atomic_uint_least16_t;
typedef atomic<int_least32_t>  atomic_int_least32_t;
typedef atomic<uint_least32_t> atomic_uint_least32_t;
typedef atomic<int_least64_t>  atomic_int_least64_t;
typedef atomic<uint_least64_t> atomic_uint_least64_t;

// Aliases for the <cstdint> fastest-width integer typedefs.
typedef atomic<int_fast8_t>   atomic_int_fast8_t;
typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
typedef atomic<int_fast16_t>  atomic_int_fast16_t;
typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
typedef atomic<int_fast32_t>  atomic_int_fast32_t;
typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
typedef atomic<int_fast64_t>  atomic_int_fast64_t;
typedef atomic<uint_fast64_t> atomic_uint_fast64_t;

// Aliases for the <cstdint> exact-width integer typedefs.
typedef atomic< int8_t>  atomic_int8_t;
typedef atomic<uint8_t>  atomic_uint8_t;
typedef atomic< int16_t> atomic_int16_t;
typedef atomic<uint16_t> atomic_uint16_t;
typedef atomic< int32_t> atomic_int32_t;
typedef atomic<uint32_t> atomic_uint32_t;
typedef atomic< int64_t> atomic_int64_t;
typedef atomic<uint64_t> atomic_uint64_t;

// Aliases for the <cstdint> pointer-sized and maximum-width typedefs.
typedef atomic<intptr_t>  atomic_intptr_t;
typedef atomic<uintptr_t> atomic_uintptr_t;
typedef atomic<size_t>    atomic_size_t;
typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
typedef atomic<intmax_t>  atomic_intmax_t;
typedef atomic<uintmax_t> atomic_uintmax_t;

// Macro initializers: ATOMIC_FLAG_INIT sets an atomic_flag to clear;
// ATOMIC_VAR_INIT(v) initializes an atomic<T> to v (both rely on the
// braced-init constructors above).
#define ATOMIC_FLAG_INIT {false}
#define ATOMIC_VAR_INIT(__v) {__v}
1885
1886_LIBCPP_END_NAMESPACE_STD
1887
1888#endif  // _LIBCPP_ATOMIC
1889