1// -*- C++ -*-
2//===--------------------------- atomic -----------------------------------===//
3//
4//                     The LLVM Compiler Infrastructure
5//
6// This file is distributed under the University of Illinois Open Source
7// License. See LICENSE.TXT for details.
8//
9//===----------------------------------------------------------------------===//
10
11#ifndef _LIBCPP_ATOMIC
12#define _LIBCPP_ATOMIC
13
14/*
15    atomic synopsis
16
17namespace std
18{
19
20// order and consistency
21
22typedef enum memory_order
23{
24    memory_order_relaxed,
25    memory_order_consume,  // load-consume
26    memory_order_acquire,  // load-acquire
27    memory_order_release,  // store-release
28    memory_order_acq_rel,  // store-release load-acquire
    memory_order_seq_cst   // store-release load-acquire, single total order
30} memory_order;
31
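Example (illustrative, not part of the synopsis): a typical release/acquire
pairing. Assuming "int data = 0;" and "atomic<bool> ready(false);" are shared
between two threads, the release store publishes the plain write to data, and
any thread that observes ready with an acquire load is guaranteed to see it.

    // Thread 1
    data = 42;
    ready.store(true, memory_order_release);

    // Thread 2
    while (!ready.load(memory_order_acquire))
        ;
    assert(data == 42);
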
32template <class T> T kill_dependency(T y) noexcept;
33
34// lock-free property
35
36#define ATOMIC_BOOL_LOCK_FREE unspecified
37#define ATOMIC_CHAR_LOCK_FREE unspecified
38#define ATOMIC_CHAR16_T_LOCK_FREE unspecified
39#define ATOMIC_CHAR32_T_LOCK_FREE unspecified
40#define ATOMIC_WCHAR_T_LOCK_FREE unspecified
41#define ATOMIC_SHORT_LOCK_FREE unspecified
42#define ATOMIC_INT_LOCK_FREE unspecified
43#define ATOMIC_LONG_LOCK_FREE unspecified
44#define ATOMIC_LLONG_LOCK_FREE unspecified
45#define ATOMIC_POINTER_LOCK_FREE unspecified
46
47// flag type and operations
48
49typedef struct atomic_flag
50{
51    bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
52    bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
53    void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
54    void clear(memory_order m = memory_order_seq_cst) noexcept;
    atomic_flag() noexcept = default;
56    atomic_flag(const atomic_flag&) = delete;
57    atomic_flag& operator=(const atomic_flag&) = delete;
58    atomic_flag& operator=(const atomic_flag&) volatile = delete;
59} atomic_flag;
60
61bool
62    atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept;
63
64bool
65    atomic_flag_test_and_set(atomic_flag* obj) noexcept;
66
67bool
68    atomic_flag_test_and_set_explicit(volatile atomic_flag* obj,
69                                      memory_order m) noexcept;
70
71bool
72    atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;
73
74void
75    atomic_flag_clear(volatile atomic_flag* obj) noexcept;
76
77void
78    atomic_flag_clear(atomic_flag* obj) noexcept;
79
80void
81    atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;
82
83void
84    atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;
85
86#define ATOMIC_FLAG_INIT see below
87#define ATOMIC_VAR_INIT(value) see below
88
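Example (illustrative): atomic_flag is the one type whose operations are
guaranteed to be lock-free, which makes it suitable for a minimal spin lock.
The names lock_, spin_lock and spin_unlock below are illustrative, not part of
the library.

    atomic_flag lock_ = ATOMIC_FLAG_INIT;

    void spin_lock()
    {
        while (lock_.test_and_set(memory_order_acquire))
            ;   // busy-wait until the previous value was false
    }

    void spin_unlock()
    {
        lock_.clear(memory_order_release);
    }
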
89template <class T>
90struct atomic
91{
92    bool is_lock_free() const volatile noexcept;
93    bool is_lock_free() const noexcept;
94    void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
95    void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
96    T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
97    T load(memory_order m = memory_order_seq_cst) const noexcept;
98    operator T() const volatile noexcept;
99    operator T() const noexcept;
100    T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
101    T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
102    bool compare_exchange_weak(T& expc, T desr,
103                               memory_order s, memory_order f) volatile noexcept;
104    bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
105    bool compare_exchange_strong(T& expc, T desr,
106                                 memory_order s, memory_order f) volatile noexcept;
107    bool compare_exchange_strong(T& expc, T desr,
108                                 memory_order s, memory_order f) noexcept;
109    bool compare_exchange_weak(T& expc, T desr,
110                               memory_order m = memory_order_seq_cst) volatile noexcept;
111    bool compare_exchange_weak(T& expc, T desr,
112                               memory_order m = memory_order_seq_cst) noexcept;
113    bool compare_exchange_strong(T& expc, T desr,
114                                memory_order m = memory_order_seq_cst) volatile noexcept;
115    bool compare_exchange_strong(T& expc, T desr,
116                                 memory_order m = memory_order_seq_cst) noexcept;
117
118    atomic() noexcept = default;
119    constexpr atomic(T desr) noexcept;
120    atomic(const atomic&) = delete;
121    atomic& operator=(const atomic&) = delete;
122    atomic& operator=(const atomic&) volatile = delete;
123    T operator=(T) volatile noexcept;
124    T operator=(T) noexcept;
125};
126
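Example (illustrative): the canonical retry loop for compare_exchange_weak. When
the exchange fails, the current value is loaded back into "expected", so the
loop recomputes and retries; the weak form may fail spuriously but is usually
the right choice inside a loop. The name add_saturating is illustrative only.

    atomic<int> counter(0);

    void add_saturating(int delta, int limit)
    {
        int expected = counter.load(memory_order_relaxed);
        int desired;
        do {
            desired = expected + delta;
            if (desired > limit)
                desired = limit;
        } while (!counter.compare_exchange_weak(expected, desired));
    }
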
127template <>
128struct atomic<integral>
129{
130    bool is_lock_free() const volatile noexcept;
131    bool is_lock_free() const noexcept;
132    void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
133    void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
134    integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
135    integral load(memory_order m = memory_order_seq_cst) const noexcept;
136    operator integral() const volatile noexcept;
137    operator integral() const noexcept;
138    integral exchange(integral desr,
139                      memory_order m = memory_order_seq_cst) volatile noexcept;
140    integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
141    bool compare_exchange_weak(integral& expc, integral desr,
142                               memory_order s, memory_order f) volatile noexcept;
143    bool compare_exchange_weak(integral& expc, integral desr,
144                               memory_order s, memory_order f) noexcept;
145    bool compare_exchange_strong(integral& expc, integral desr,
146                                 memory_order s, memory_order f) volatile noexcept;
147    bool compare_exchange_strong(integral& expc, integral desr,
148                                 memory_order s, memory_order f) noexcept;
149    bool compare_exchange_weak(integral& expc, integral desr,
150                               memory_order m = memory_order_seq_cst) volatile noexcept;
151    bool compare_exchange_weak(integral& expc, integral desr,
152                               memory_order m = memory_order_seq_cst) noexcept;
153    bool compare_exchange_strong(integral& expc, integral desr,
154                                memory_order m = memory_order_seq_cst) volatile noexcept;
155    bool compare_exchange_strong(integral& expc, integral desr,
156                                 memory_order m = memory_order_seq_cst) noexcept;
157
158    integral
159        fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
160    integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
161    integral
162        fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
163    integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
164    integral
165        fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
166    integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
167    integral
168        fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
169    integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
170    integral
171        fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
172    integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;
173
174    atomic() noexcept = default;
175    constexpr atomic(integral desr) noexcept;
176    atomic(const atomic&) = delete;
177    atomic& operator=(const atomic&) = delete;
178    atomic& operator=(const atomic&) volatile = delete;
179    integral operator=(integral desr) volatile noexcept;
180    integral operator=(integral desr) noexcept;
181
182    integral operator++(int) volatile noexcept;
183    integral operator++(int) noexcept;
184    integral operator--(int) volatile noexcept;
185    integral operator--(int) noexcept;
186    integral operator++() volatile noexcept;
187    integral operator++() noexcept;
188    integral operator--() volatile noexcept;
189    integral operator--() noexcept;
190    integral operator+=(integral op) volatile noexcept;
191    integral operator+=(integral op) noexcept;
192    integral operator-=(integral op) volatile noexcept;
193    integral operator-=(integral op) noexcept;
194    integral operator&=(integral op) volatile noexcept;
195    integral operator&=(integral op) noexcept;
196    integral operator|=(integral op) volatile noexcept;
197    integral operator|=(integral op) noexcept;
198    integral operator^=(integral op) volatile noexcept;
199    integral operator^=(integral op) noexcept;
200};
201
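Example (illustrative): for integral types the fetch_* operations and the
compound-assignment and increment operators are all single atomic
read-modify-write operations, so a shared counter needs no external lock.

    atomic<unsigned> hits(0);

    ++hits;                                          // atomic increment
    hits += 10;                                      // atomic add
    unsigned prior = hits.fetch_add(1, memory_order_relaxed);  // returns the old value
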
202template <class T>
203struct atomic<T*>
204{
205    bool is_lock_free() const volatile noexcept;
206    bool is_lock_free() const noexcept;
207    void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
208    void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
209    T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
210    T* load(memory_order m = memory_order_seq_cst) const noexcept;
211    operator T*() const volatile noexcept;
212    operator T*() const noexcept;
213    T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
214    T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
215    bool compare_exchange_weak(T*& expc, T* desr,
216                               memory_order s, memory_order f) volatile noexcept;
217    bool compare_exchange_weak(T*& expc, T* desr,
218                               memory_order s, memory_order f) noexcept;
219    bool compare_exchange_strong(T*& expc, T* desr,
220                                 memory_order s, memory_order f) volatile noexcept;
221    bool compare_exchange_strong(T*& expc, T* desr,
222                                 memory_order s, memory_order f) noexcept;
223    bool compare_exchange_weak(T*& expc, T* desr,
224                               memory_order m = memory_order_seq_cst) volatile noexcept;
225    bool compare_exchange_weak(T*& expc, T* desr,
226                               memory_order m = memory_order_seq_cst) noexcept;
227    bool compare_exchange_strong(T*& expc, T* desr,
228                                memory_order m = memory_order_seq_cst) volatile noexcept;
229    bool compare_exchange_strong(T*& expc, T* desr,
230                                 memory_order m = memory_order_seq_cst) noexcept;
231    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
232    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
233    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
234    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
235
236    atomic() noexcept = default;
237    constexpr atomic(T* desr) noexcept;
238    atomic(const atomic&) = delete;
239    atomic& operator=(const atomic&) = delete;
240    atomic& operator=(const atomic&) volatile = delete;
241
242    T* operator=(T*) volatile noexcept;
243    T* operator=(T*) noexcept;
244    T* operator++(int) volatile noexcept;
245    T* operator++(int) noexcept;
246    T* operator--(int) volatile noexcept;
247    T* operator--(int) noexcept;
248    T* operator++() volatile noexcept;
249    T* operator++() noexcept;
250    T* operator--() volatile noexcept;
251    T* operator--() noexcept;
252    T* operator+=(ptrdiff_t op) volatile noexcept;
253    T* operator+=(ptrdiff_t op) noexcept;
254    T* operator-=(ptrdiff_t op) volatile noexcept;
255    T* operator-=(ptrdiff_t op) noexcept;
256};
257
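Example (illustrative): arithmetic on atomic<T*> follows ordinary pointer
arithmetic, so fetch_add(1) advances by one element (sizeof(T) bytes), not by
one byte.

    int buffer[8] = {};
    atomic<int*> cursor(buffer);

    int* slot = cursor.fetch_add(1);   // returns &buffer[0]; cursor now points at buffer[1]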
258
259template <class T>
260    bool
261    atomic_is_lock_free(const volatile atomic<T>* obj) noexcept;
262
263template <class T>
264    bool
265    atomic_is_lock_free(const atomic<T>* obj) noexcept;
266
267template <class T>
268    void
269    atomic_init(volatile atomic<T>* obj, T desr) noexcept;
270
271template <class T>
272    void
273    atomic_init(atomic<T>* obj, T desr) noexcept;
274
275template <class T>
276    void
277    atomic_store(volatile atomic<T>* obj, T desr) noexcept;
278
279template <class T>
280    void
281    atomic_store(atomic<T>* obj, T desr) noexcept;
282
283template <class T>
284    void
285    atomic_store_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
286
287template <class T>
288    void
289    atomic_store_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
290
291template <class T>
292    T
293    atomic_load(const volatile atomic<T>* obj) noexcept;
294
295template <class T>
296    T
297    atomic_load(const atomic<T>* obj) noexcept;
298
299template <class T>
300    T
301    atomic_load_explicit(const volatile atomic<T>* obj, memory_order m) noexcept;
302
303template <class T>
304    T
305    atomic_load_explicit(const atomic<T>* obj, memory_order m) noexcept;
306
307template <class T>
308    T
309    atomic_exchange(volatile atomic<T>* obj, T desr) noexcept;
310
311template <class T>
312    T
313    atomic_exchange(atomic<T>* obj, T desr) noexcept;
314
315template <class T>
316    T
317    atomic_exchange_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
318
319template <class T>
320    T
321    atomic_exchange_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
322
323template <class T>
324    bool
325    atomic_compare_exchange_weak(volatile atomic<T>* obj, T* expc, T desr) noexcept;
326
327template <class T>
328    bool
329    atomic_compare_exchange_weak(atomic<T>* obj, T* expc, T desr) noexcept;
330
331template <class T>
332    bool
333    atomic_compare_exchange_strong(volatile atomic<T>* obj, T* expc, T desr) noexcept;
334
335template <class T>
336    bool
337    atomic_compare_exchange_strong(atomic<T>* obj, T* expc, T desr) noexcept;
338
339template <class T>
340    bool
341    atomic_compare_exchange_weak_explicit(volatile atomic<T>* obj, T* expc,
342                                          T desr,
343                                          memory_order s, memory_order f) noexcept;
344
345template <class T>
346    bool
347    atomic_compare_exchange_weak_explicit(atomic<T>* obj, T* expc, T desr,
348                                          memory_order s, memory_order f) noexcept;
349
350template <class T>
351    bool
352    atomic_compare_exchange_strong_explicit(volatile atomic<T>* obj,
353                                            T* expc, T desr,
354                                            memory_order s, memory_order f) noexcept;
355
356template <class T>
357    bool
358    atomic_compare_exchange_strong_explicit(atomic<T>* obj, T* expc,
359                                            T desr,
360                                            memory_order s, memory_order f) noexcept;
361
362template <class Integral>
363    Integral
364    atomic_fetch_add(volatile atomic<Integral>* obj, Integral op) noexcept;
365
366template <class Integral>
367    Integral
368    atomic_fetch_add(atomic<Integral>* obj, Integral op) noexcept;
369
370template <class Integral>
371    Integral
372    atomic_fetch_add_explicit(volatile atomic<Integral>* obj, Integral op,
373                              memory_order m) noexcept;
374template <class Integral>
375    Integral
376    atomic_fetch_add_explicit(atomic<Integral>* obj, Integral op,
377                              memory_order m) noexcept;
378template <class Integral>
379    Integral
380    atomic_fetch_sub(volatile atomic<Integral>* obj, Integral op) noexcept;
381
382template <class Integral>
383    Integral
384    atomic_fetch_sub(atomic<Integral>* obj, Integral op) noexcept;
385
386template <class Integral>
387    Integral
388    atomic_fetch_sub_explicit(volatile atomic<Integral>* obj, Integral op,
389                              memory_order m) noexcept;
390template <class Integral>
391    Integral
392    atomic_fetch_sub_explicit(atomic<Integral>* obj, Integral op,
393                              memory_order m) noexcept;
394template <class Integral>
395    Integral
396    atomic_fetch_and(volatile atomic<Integral>* obj, Integral op) noexcept;
397
398template <class Integral>
399    Integral
400    atomic_fetch_and(atomic<Integral>* obj, Integral op) noexcept;
401
402template <class Integral>
403    Integral
404    atomic_fetch_and_explicit(volatile atomic<Integral>* obj, Integral op,
405                              memory_order m) noexcept;
406template <class Integral>
407    Integral
408    atomic_fetch_and_explicit(atomic<Integral>* obj, Integral op,
409                              memory_order m) noexcept;
410template <class Integral>
411    Integral
412    atomic_fetch_or(volatile atomic<Integral>* obj, Integral op) noexcept;
413
414template <class Integral>
415    Integral
416    atomic_fetch_or(atomic<Integral>* obj, Integral op) noexcept;
417
418template <class Integral>
419    Integral
420    atomic_fetch_or_explicit(volatile atomic<Integral>* obj, Integral op,
421                             memory_order m) noexcept;
422template <class Integral>
423    Integral
424    atomic_fetch_or_explicit(atomic<Integral>* obj, Integral op,
425                             memory_order m) noexcept;
426template <class Integral>
427    Integral
428    atomic_fetch_xor(volatile atomic<Integral>* obj, Integral op) noexcept;
429
430template <class Integral>
431    Integral
432    atomic_fetch_xor(atomic<Integral>* obj, Integral op) noexcept;
433
434template <class Integral>
435    Integral
436    atomic_fetch_xor_explicit(volatile atomic<Integral>* obj, Integral op,
437                              memory_order m) noexcept;
438template <class Integral>
439    Integral
440    atomic_fetch_xor_explicit(atomic<Integral>* obj, Integral op,
441                              memory_order m) noexcept;
442
443template <class T>
444    T*
445    atomic_fetch_add(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
446
447template <class T>
448    T*
449    atomic_fetch_add(atomic<T*>* obj, ptrdiff_t op) noexcept;
450
451template <class T>
452    T*
453    atomic_fetch_add_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
454                              memory_order m) noexcept;
455template <class T>
456    T*
457    atomic_fetch_add_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
458
459template <class T>
460    T*
461    atomic_fetch_sub(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
462
463template <class T>
464    T*
465    atomic_fetch_sub(atomic<T*>* obj, ptrdiff_t op) noexcept;
466
467template <class T>
468    T*
469    atomic_fetch_sub_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
470                              memory_order m) noexcept;
471template <class T>
472    T*
473    atomic_fetch_sub_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
474
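Example (illustrative): the free functions mirror the member functions and keep
source compatibility with C; atomic_store(&a, v) behaves like a.store(v), and
the _explicit variants take an explicit memory_order.

    atomic<int> a(0);

    atomic_store_explicit(&a, 1, memory_order_release);
    int observed = atomic_load_explicit(&a, memory_order_acquire);
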
475// Atomics for standard typedef types
476
477typedef atomic<bool>               atomic_bool;
478typedef atomic<char>               atomic_char;
479typedef atomic<signed char>        atomic_schar;
480typedef atomic<unsigned char>      atomic_uchar;
481typedef atomic<short>              atomic_short;
482typedef atomic<unsigned short>     atomic_ushort;
483typedef atomic<int>                atomic_int;
484typedef atomic<unsigned int>       atomic_uint;
485typedef atomic<long>               atomic_long;
486typedef atomic<unsigned long>      atomic_ulong;
487typedef atomic<long long>          atomic_llong;
488typedef atomic<unsigned long long> atomic_ullong;
489typedef atomic<char16_t>           atomic_char16_t;
490typedef atomic<char32_t>           atomic_char32_t;
491typedef atomic<wchar_t>            atomic_wchar_t;
492
493typedef atomic<int_least8_t>   atomic_int_least8_t;
494typedef atomic<uint_least8_t>  atomic_uint_least8_t;
495typedef atomic<int_least16_t>  atomic_int_least16_t;
496typedef atomic<uint_least16_t> atomic_uint_least16_t;
497typedef atomic<int_least32_t>  atomic_int_least32_t;
498typedef atomic<uint_least32_t> atomic_uint_least32_t;
499typedef atomic<int_least64_t>  atomic_int_least64_t;
500typedef atomic<uint_least64_t> atomic_uint_least64_t;
501
502typedef atomic<int_fast8_t>   atomic_int_fast8_t;
503typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
504typedef atomic<int_fast16_t>  atomic_int_fast16_t;
505typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
506typedef atomic<int_fast32_t>  atomic_int_fast32_t;
507typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
508typedef atomic<int_fast64_t>  atomic_int_fast64_t;
509typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
510
511typedef atomic<intptr_t>  atomic_intptr_t;
512typedef atomic<uintptr_t> atomic_uintptr_t;
513typedef atomic<size_t>    atomic_size_t;
514typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
515typedef atomic<intmax_t>  atomic_intmax_t;
516typedef atomic<uintmax_t> atomic_uintmax_t;
517
518// fences
519
520void atomic_thread_fence(memory_order m) noexcept;
521void atomic_signal_fence(memory_order m) noexcept;
522
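Example (illustrative): fences order the surrounding relaxed operations, giving
the same visibility guarantee as the release/acquire example near the top of
this synopsis (same data and ready variables assumed) while keeping the atomic
accesses themselves relaxed.

    // Thread 1
    data = 42;
    atomic_thread_fence(memory_order_release);
    ready.store(true, memory_order_relaxed);

    // Thread 2
    while (!ready.load(memory_order_relaxed))
        ;
    atomic_thread_fence(memory_order_acquire);
    assert(data == 42);
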
523}  // std
524
525*/
526
527#include <__config>
528#include <cstddef>
529#include <cstdint>
530#include <type_traits>
531
532#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
533#pragma GCC system_header
534#endif
535
536#ifdef _LIBCPP_HAS_NO_THREADS
#error <atomic> is not supported on this single-threaded system
538#else // !_LIBCPP_HAS_NO_THREADS
539
540_LIBCPP_BEGIN_NAMESPACE_STD
541
542#if !__has_feature(cxx_atomic) && _GNUC_VER < 407
543#error <atomic> is not implemented
544#else
545
546typedef enum memory_order
547{
548    memory_order_relaxed, memory_order_consume, memory_order_acquire,
549    memory_order_release, memory_order_acq_rel, memory_order_seq_cst
550} memory_order;
551
552#if _GNUC_VER >= 407
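// When Clang's _Atomic type specifier and __c11_atomic_* builtins are not
// available, emulate them for GCC >= 4.7: _Atomic(T) becomes a thin wrapper
// struct, and each __c11_atomic_* shim below lowers onto the corresponding
// GCC __atomic_* builtin.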
553namespace __gcc_atomic {
554template <typename _Tp>
555struct __gcc_atomic_t {
556  __gcc_atomic_t() _NOEXCEPT {}
557  explicit __gcc_atomic_t(_Tp value) _NOEXCEPT : __a_value(value) {}
558  _Tp __a_value;
559};
560#define _Atomic(x) __gcc_atomic::__gcc_atomic_t<x>
561
562template <typename _Tp> _Tp __create();
563
564template <typename _Tp, typename _Td>
565typename enable_if<sizeof(_Tp()->__a_value = __create<_Td>()), char>::type
566    __test_atomic_assignable(int);
567template <typename _Tp, typename _Up>
568__two __test_atomic_assignable(...);
569
570template <typename _Tp, typename _Td>
571struct __can_assign {
572  static const bool value =
573      sizeof(__test_atomic_assignable<_Tp, _Td>(1)) == sizeof(char);
574};
575
576static inline constexpr int __to_gcc_order(memory_order __order) {
  // A C++11 constexpr function must consist of a single return statement, so
  // use a conditional chain rather than a switch.
578  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
579         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
580          (__order == memory_order_release ? __ATOMIC_RELEASE:
581           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
582            (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL:
583              __ATOMIC_CONSUME))));
584}
585
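// A compare-exchange failure ordering may not be memory_order_release or
// memory_order_acq_rel, so when a single ordering is supplied for both paths
// the mapping below demotes release to relaxed and acq_rel to acquire.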
586static inline constexpr int __to_gcc_failure_order(memory_order __order) {
  // A C++11 constexpr function must consist of a single return statement, so
  // use a conditional chain rather than a switch.
588  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
589         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
590          (__order == memory_order_release ? __ATOMIC_RELAXED:
591           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
592            (__order == memory_order_acq_rel ? __ATOMIC_ACQUIRE:
593              __ATOMIC_CONSUME))));
594}
595
596} // namespace __gcc_atomic
597
598template <typename _Tp>
599static inline
600typename enable_if<
601    __gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value>::type
602__c11_atomic_init(volatile _Atomic(_Tp)* __a,  _Tp __val) {
603  __a->__a_value = __val;
604}
605
606template <typename _Tp>
607static inline
608typename enable_if<
609    !__gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value &&
610     __gcc_atomic::__can_assign<         _Atomic(_Tp)*, _Tp>::value>::type
611__c11_atomic_init(volatile _Atomic(_Tp)* __a,  _Tp __val) {
  // [atomics.types.generic]p1 guarantees _Tp is trivially copyable. The
  // implicitly-declared copy assignment operator is never volatile-qualified,
  // so it cannot be used to assign through a volatile lvalue; fall back to a
  // byte-by-byte copy instead.
615  volatile char* to = reinterpret_cast<volatile char*>(&__a->__a_value);
616  volatile char* end = to + sizeof(_Tp);
617  char* from = reinterpret_cast<char*>(&__val);
618  while (to != end) {
619    *to++ = *from++;
620  }
621}
622
623template <typename _Tp>
624static inline void __c11_atomic_init(_Atomic(_Tp)* __a,  _Tp __val) {
625  __a->__a_value = __val;
626}
627
628static inline void __c11_atomic_thread_fence(memory_order __order) {
629  __atomic_thread_fence(__gcc_atomic::__to_gcc_order(__order));
630}
631
632static inline void __c11_atomic_signal_fence(memory_order __order) {
633  __atomic_signal_fence(__gcc_atomic::__to_gcc_order(__order));
634}
635
636static inline bool __c11_atomic_is_lock_free(size_t __size) {
637  return __atomic_is_lock_free(__size, 0);
638}
639
640template <typename _Tp>
641static inline void __c11_atomic_store(volatile _Atomic(_Tp)* __a,  _Tp __val,
642                                      memory_order __order) {
  __atomic_store(&__a->__a_value, &__val,
644                        __gcc_atomic::__to_gcc_order(__order));
645}
646
647template <typename _Tp>
648static inline void __c11_atomic_store(_Atomic(_Tp)* __a,  _Tp __val,
649                                      memory_order __order) {
650  __atomic_store(&__a->__a_value, &__val,
651                 __gcc_atomic::__to_gcc_order(__order));
652}
653
654template <typename _Tp>
655static inline _Tp __c11_atomic_load(volatile _Atomic(_Tp)* __a,
656                                    memory_order __order) {
657  _Tp __ret;
658  __atomic_load(&__a->__a_value, &__ret,
659                __gcc_atomic::__to_gcc_order(__order));
660  return __ret;
661}
662
663template <typename _Tp>
664static inline _Tp __c11_atomic_load(_Atomic(_Tp)* __a, memory_order __order) {
665  _Tp __ret;
666  __atomic_load(&__a->__a_value, &__ret,
667                __gcc_atomic::__to_gcc_order(__order));
668  return __ret;
669}
670
671template <typename _Tp>
672static inline _Tp __c11_atomic_exchange(volatile _Atomic(_Tp)* __a,
673                                        _Tp __value, memory_order __order) {
674  _Tp __ret;
675  __atomic_exchange(&__a->__a_value, &__value, &__ret,
676                    __gcc_atomic::__to_gcc_order(__order));
677  return __ret;
678}
679
680template <typename _Tp>
681static inline _Tp __c11_atomic_exchange(_Atomic(_Tp)* __a, _Tp __value,
682                                        memory_order __order) {
683  _Tp __ret;
684  __atomic_exchange(&__a->__a_value, &__value, &__ret,
685                    __gcc_atomic::__to_gcc_order(__order));
686  return __ret;
687}
688
689template <typename _Tp>
690static inline bool __c11_atomic_compare_exchange_strong(
691    volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
692    memory_order __success, memory_order __failure) {
693  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
694                                   false,
695                                   __gcc_atomic::__to_gcc_order(__success),
696                                   __gcc_atomic::__to_gcc_failure_order(__failure));
697}
698
699template <typename _Tp>
700static inline bool __c11_atomic_compare_exchange_strong(
701    _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
702    memory_order __failure) {
703  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
704                                   false,
705                                   __gcc_atomic::__to_gcc_order(__success),
706                                   __gcc_atomic::__to_gcc_failure_order(__failure));
707}
708
709template <typename _Tp>
710static inline bool __c11_atomic_compare_exchange_weak(
711    volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
712    memory_order __success, memory_order __failure) {
713  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
714                                   true,
715                                   __gcc_atomic::__to_gcc_order(__success),
716                                   __gcc_atomic::__to_gcc_failure_order(__failure));
717}
718
719template <typename _Tp>
720static inline bool __c11_atomic_compare_exchange_weak(
721    _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
722    memory_order __failure) {
723  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
724                                   true,
725                                   __gcc_atomic::__to_gcc_order(__success),
726                                   __gcc_atomic::__to_gcc_failure_order(__failure));
727}
728
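// __skip_amt scales the delta handed to the fetch_add/fetch_sub builtins: the
// GCC __atomic builtins do not scale pointer operands, so for atomic<T*> the
// delta is multiplied by sizeof(T) to step by whole objects.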
729template <typename _Tp>
730struct __skip_amt { enum {value = 1}; };
731
732template <typename _Tp>
733struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; };
734
// FIXME: It is unclear what the specification requires of atomic_fetch_add for
// array types. These specializations are left without a 'value' member so that
// such uses fail to compile instead of silently misbehaving.
737template <typename _Tp>
738struct __skip_amt<_Tp[]> { };
739template <typename _Tp, int n>
740struct __skip_amt<_Tp[n]> { };
741
742template <typename _Tp, typename _Td>
743static inline _Tp __c11_atomic_fetch_add(volatile _Atomic(_Tp)* __a,
744                                         _Td __delta, memory_order __order) {
745  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
746                            __gcc_atomic::__to_gcc_order(__order));
747}
748
749template <typename _Tp, typename _Td>
750static inline _Tp __c11_atomic_fetch_add(_Atomic(_Tp)* __a, _Td __delta,
751                                         memory_order __order) {
752  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
753                            __gcc_atomic::__to_gcc_order(__order));
754}
755
756template <typename _Tp, typename _Td>
757static inline _Tp __c11_atomic_fetch_sub(volatile _Atomic(_Tp)* __a,
758                                         _Td __delta, memory_order __order) {
759  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
760                            __gcc_atomic::__to_gcc_order(__order));
761}
762
763template <typename _Tp, typename _Td>
764static inline _Tp __c11_atomic_fetch_sub(_Atomic(_Tp)* __a, _Td __delta,
765                                         memory_order __order) {
766  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
767                            __gcc_atomic::__to_gcc_order(__order));
768}
769
770template <typename _Tp>
771static inline _Tp __c11_atomic_fetch_and(volatile _Atomic(_Tp)* __a,
772                                         _Tp __pattern, memory_order __order) {
773  return __atomic_fetch_and(&__a->__a_value, __pattern,
774                            __gcc_atomic::__to_gcc_order(__order));
775}
776
777template <typename _Tp>
778static inline _Tp __c11_atomic_fetch_and(_Atomic(_Tp)* __a,
779                                         _Tp __pattern, memory_order __order) {
780  return __atomic_fetch_and(&__a->__a_value, __pattern,
781                            __gcc_atomic::__to_gcc_order(__order));
782}
783
784template <typename _Tp>
785static inline _Tp __c11_atomic_fetch_or(volatile _Atomic(_Tp)* __a,
786                                        _Tp __pattern, memory_order __order) {
787  return __atomic_fetch_or(&__a->__a_value, __pattern,
788                           __gcc_atomic::__to_gcc_order(__order));
789}
790
791template <typename _Tp>
792static inline _Tp __c11_atomic_fetch_or(_Atomic(_Tp)* __a, _Tp __pattern,
793                                        memory_order __order) {
794  return __atomic_fetch_or(&__a->__a_value, __pattern,
795                           __gcc_atomic::__to_gcc_order(__order));
796}
797
798template <typename _Tp>
799static inline _Tp __c11_atomic_fetch_xor(volatile _Atomic(_Tp)* __a,
800                                         _Tp __pattern, memory_order __order) {
801  return __atomic_fetch_xor(&__a->__a_value, __pattern,
802                            __gcc_atomic::__to_gcc_order(__order));
803}
804
805template <typename _Tp>
806static inline _Tp __c11_atomic_fetch_xor(_Atomic(_Tp)* __a, _Tp __pattern,
807                                         memory_order __order) {
808  return __atomic_fetch_xor(&__a->__a_value, __pattern,
809                            __gcc_atomic::__to_gcc_order(__order));
810}
811#endif // _GNUC_VER >= 407
812
813template <class _Tp>
814inline _LIBCPP_INLINE_VISIBILITY
815_Tp
816kill_dependency(_Tp __y) _NOEXCEPT
817{
818    return __y;
819}
820
821// general atomic<T>
822
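// The defaulted second template parameter selects the arithmetic
// specialization below for integral types other than bool; this primary
// (false) case provides only the operations common to every atomic<T>.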
823template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
824struct __atomic_base  // false
825{
826    mutable _Atomic(_Tp) __a_;
827
828    _LIBCPP_INLINE_VISIBILITY
829    bool is_lock_free() const volatile _NOEXCEPT
830        {return __c11_atomic_is_lock_free(sizeof(_Tp));}
831    _LIBCPP_INLINE_VISIBILITY
832    bool is_lock_free() const _NOEXCEPT
833        {return __c11_atomic_is_lock_free(sizeof(_Tp));}
834    _LIBCPP_INLINE_VISIBILITY
835    void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
836        {__c11_atomic_store(&__a_, __d, __m);}
837    _LIBCPP_INLINE_VISIBILITY
838    void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
839        {__c11_atomic_store(&__a_, __d, __m);}
840    _LIBCPP_INLINE_VISIBILITY
841    _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
842        {return __c11_atomic_load(&__a_, __m);}
843    _LIBCPP_INLINE_VISIBILITY
844    _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
845        {return __c11_atomic_load(&__a_, __m);}
846    _LIBCPP_INLINE_VISIBILITY
847    operator _Tp() const volatile _NOEXCEPT {return load();}
848    _LIBCPP_INLINE_VISIBILITY
849    operator _Tp() const _NOEXCEPT          {return load();}
850    _LIBCPP_INLINE_VISIBILITY
851    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
852        {return __c11_atomic_exchange(&__a_, __d, __m);}
853    _LIBCPP_INLINE_VISIBILITY
854    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
855        {return __c11_atomic_exchange(&__a_, __d, __m);}
856    _LIBCPP_INLINE_VISIBILITY
857    bool compare_exchange_weak(_Tp& __e, _Tp __d,
858                               memory_order __s, memory_order __f) volatile _NOEXCEPT
859        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
860    _LIBCPP_INLINE_VISIBILITY
861    bool compare_exchange_weak(_Tp& __e, _Tp __d,
862                               memory_order __s, memory_order __f) _NOEXCEPT
863        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
864    _LIBCPP_INLINE_VISIBILITY
865    bool compare_exchange_strong(_Tp& __e, _Tp __d,
866                                 memory_order __s, memory_order __f) volatile _NOEXCEPT
867        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
868    _LIBCPP_INLINE_VISIBILITY
869    bool compare_exchange_strong(_Tp& __e, _Tp __d,
870                                 memory_order __s, memory_order __f) _NOEXCEPT
871        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
872    _LIBCPP_INLINE_VISIBILITY
873    bool compare_exchange_weak(_Tp& __e, _Tp __d,
874                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
875        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
876    _LIBCPP_INLINE_VISIBILITY
877    bool compare_exchange_weak(_Tp& __e, _Tp __d,
878                               memory_order __m = memory_order_seq_cst) _NOEXCEPT
879        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
880    _LIBCPP_INLINE_VISIBILITY
881    bool compare_exchange_strong(_Tp& __e, _Tp __d,
882                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
883        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
884    _LIBCPP_INLINE_VISIBILITY
885    bool compare_exchange_strong(_Tp& __e, _Tp __d,
886                                 memory_order __m = memory_order_seq_cst) _NOEXCEPT
887        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
888
889    _LIBCPP_INLINE_VISIBILITY
890#ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
891    __atomic_base() _NOEXCEPT = default;
892#else
893    __atomic_base() _NOEXCEPT : __a_() {}
894#endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
895
896    _LIBCPP_INLINE_VISIBILITY
897    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}
898#ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
899    __atomic_base(const __atomic_base&) = delete;
900    __atomic_base& operator=(const __atomic_base&) = delete;
901    __atomic_base& operator=(const __atomic_base&) volatile = delete;
902#else  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
903private:
904    __atomic_base(const __atomic_base&);
905    __atomic_base& operator=(const __atomic_base&);
906    __atomic_base& operator=(const __atomic_base&) volatile;
907#endif  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
908};
909
910// atomic<Integral>
911
912template <class _Tp>
913struct __atomic_base<_Tp, true>
914    : public __atomic_base<_Tp, false>
915{
916    typedef __atomic_base<_Tp, false> __base;
917    _LIBCPP_INLINE_VISIBILITY
918    __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
919    _LIBCPP_INLINE_VISIBILITY
920    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}
921
922    _LIBCPP_INLINE_VISIBILITY
923    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
924        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
925    _LIBCPP_INLINE_VISIBILITY
926    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
927        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
928    _LIBCPP_INLINE_VISIBILITY
929    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
930        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
931    _LIBCPP_INLINE_VISIBILITY
932    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
933        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
934    _LIBCPP_INLINE_VISIBILITY
935    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
936        {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
937    _LIBCPP_INLINE_VISIBILITY
938    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
939        {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
940    _LIBCPP_INLINE_VISIBILITY
941    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
942        {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
943    _LIBCPP_INLINE_VISIBILITY
944    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
945        {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
946    _LIBCPP_INLINE_VISIBILITY
947    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
948        {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
949    _LIBCPP_INLINE_VISIBILITY
950    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
951        {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
952
953    _LIBCPP_INLINE_VISIBILITY
954    _Tp operator++(int) volatile _NOEXCEPT      {return fetch_add(_Tp(1));}
955    _LIBCPP_INLINE_VISIBILITY
956    _Tp operator++(int) _NOEXCEPT               {return fetch_add(_Tp(1));}
957    _LIBCPP_INLINE_VISIBILITY
958    _Tp operator--(int) volatile _NOEXCEPT      {return fetch_sub(_Tp(1));}
959    _LIBCPP_INLINE_VISIBILITY
960    _Tp operator--(int) _NOEXCEPT               {return fetch_sub(_Tp(1));}
961    _LIBCPP_INLINE_VISIBILITY
962    _Tp operator++() volatile _NOEXCEPT         {return fetch_add(_Tp(1)) + _Tp(1);}
963    _LIBCPP_INLINE_VISIBILITY
964    _Tp operator++() _NOEXCEPT                  {return fetch_add(_Tp(1)) + _Tp(1);}
965    _LIBCPP_INLINE_VISIBILITY
966    _Tp operator--() volatile _NOEXCEPT         {return fetch_sub(_Tp(1)) - _Tp(1);}
967    _LIBCPP_INLINE_VISIBILITY
968    _Tp operator--() _NOEXCEPT                  {return fetch_sub(_Tp(1)) - _Tp(1);}
969    _LIBCPP_INLINE_VISIBILITY
970    _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
971    _LIBCPP_INLINE_VISIBILITY
972    _Tp operator+=(_Tp __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
973    _LIBCPP_INLINE_VISIBILITY
974    _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
975    _LIBCPP_INLINE_VISIBILITY
976    _Tp operator-=(_Tp __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
977    _LIBCPP_INLINE_VISIBILITY
978    _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
979    _LIBCPP_INLINE_VISIBILITY
980    _Tp operator&=(_Tp __op) _NOEXCEPT          {return fetch_and(__op) & __op;}
981    _LIBCPP_INLINE_VISIBILITY
982    _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
983    _LIBCPP_INLINE_VISIBILITY
984    _Tp operator|=(_Tp __op) _NOEXCEPT          {return fetch_or(__op) | __op;}
985    _LIBCPP_INLINE_VISIBILITY
986    _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
987    _LIBCPP_INLINE_VISIBILITY
988    _Tp operator^=(_Tp __op) _NOEXCEPT          {return fetch_xor(__op) ^ __op;}
989};
990
991// atomic<T>
992
993template <class _Tp>
994struct atomic
995    : public __atomic_base<_Tp>
996{
997    typedef __atomic_base<_Tp> __base;
998    _LIBCPP_INLINE_VISIBILITY
999    atomic() _NOEXCEPT _LIBCPP_DEFAULT
1000    _LIBCPP_INLINE_VISIBILITY
1001    _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}
1002
1003    _LIBCPP_INLINE_VISIBILITY
1004    _Tp operator=(_Tp __d) volatile _NOEXCEPT
1005        {__base::store(__d); return __d;}
1006    _LIBCPP_INLINE_VISIBILITY
1007    _Tp operator=(_Tp __d) _NOEXCEPT
1008        {__base::store(__d); return __d;}
1009};
1010
1011// atomic<T*>
1012
1013template <class _Tp>
1014struct atomic<_Tp*>
1015    : public __atomic_base<_Tp*>
1016{
1017    typedef __atomic_base<_Tp*> __base;
1018    _LIBCPP_INLINE_VISIBILITY
1019    atomic() _NOEXCEPT _LIBCPP_DEFAULT
1020    _LIBCPP_INLINE_VISIBILITY
1021    _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}
1022
1023    _LIBCPP_INLINE_VISIBILITY
1024    _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
1025        {__base::store(__d); return __d;}
1026    _LIBCPP_INLINE_VISIBILITY
1027    _Tp* operator=(_Tp* __d) _NOEXCEPT
1028        {__base::store(__d); return __d;}
1029
1030    _LIBCPP_INLINE_VISIBILITY
1031    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
1032                                                                        volatile _NOEXCEPT
1033        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
1034    _LIBCPP_INLINE_VISIBILITY
1035    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1036        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
1037    _LIBCPP_INLINE_VISIBILITY
1038    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
1039                                                                        volatile _NOEXCEPT
1040        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
1041    _LIBCPP_INLINE_VISIBILITY
1042    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1043        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
1044
1045    _LIBCPP_INLINE_VISIBILITY
1046    _Tp* operator++(int) volatile _NOEXCEPT            {return fetch_add(1);}
1047    _LIBCPP_INLINE_VISIBILITY
1048    _Tp* operator++(int) _NOEXCEPT                     {return fetch_add(1);}
1049    _LIBCPP_INLINE_VISIBILITY
1050    _Tp* operator--(int) volatile _NOEXCEPT            {return fetch_sub(1);}
1051    _LIBCPP_INLINE_VISIBILITY
1052    _Tp* operator--(int) _NOEXCEPT                     {return fetch_sub(1);}
1053    _LIBCPP_INLINE_VISIBILITY
1054    _Tp* operator++() volatile _NOEXCEPT               {return fetch_add(1) + 1;}
1055    _LIBCPP_INLINE_VISIBILITY
1056    _Tp* operator++() _NOEXCEPT                        {return fetch_add(1) + 1;}
1057    _LIBCPP_INLINE_VISIBILITY
1058    _Tp* operator--() volatile _NOEXCEPT               {return fetch_sub(1) - 1;}
1059    _LIBCPP_INLINE_VISIBILITY
1060    _Tp* operator--() _NOEXCEPT                        {return fetch_sub(1) - 1;}
1061    _LIBCPP_INLINE_VISIBILITY
1062    _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
1063    _LIBCPP_INLINE_VISIBILITY
1064    _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
1065    _LIBCPP_INLINE_VISIBILITY
1066    _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
1067    _LIBCPP_INLINE_VISIBILITY
1068    _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
1069};
1070
1071// atomic_is_lock_free
1072
1073template <class _Tp>
1074inline _LIBCPP_INLINE_VISIBILITY
1075bool
1076atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
1077{
1078    return __o->is_lock_free();
1079}
1080
1081template <class _Tp>
1082inline _LIBCPP_INLINE_VISIBILITY
1083bool
1084atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
1085{
1086    return __o->is_lock_free();
1087}
1088
1089// atomic_init
1090
1091template <class _Tp>
1092inline _LIBCPP_INLINE_VISIBILITY
1093void
1094atomic_init(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1095{
1096    __c11_atomic_init(&__o->__a_, __d);
1097}
1098
1099template <class _Tp>
1100inline _LIBCPP_INLINE_VISIBILITY
1101void
1102atomic_init(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1103{
1104    __c11_atomic_init(&__o->__a_, __d);
1105}
1106
1107// atomic_store
1108
1109template <class _Tp>
1110inline _LIBCPP_INLINE_VISIBILITY
1111void
1112atomic_store(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1113{
1114    __o->store(__d);
1115}
1116
1117template <class _Tp>
1118inline _LIBCPP_INLINE_VISIBILITY
1119void
1120atomic_store(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1121{
1122    __o->store(__d);
1123}
1124
1125// atomic_store_explicit
1126
1127template <class _Tp>
1128inline _LIBCPP_INLINE_VISIBILITY
1129void
1130atomic_store_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1131{
1132    __o->store(__d, __m);
1133}
1134
1135template <class _Tp>
1136inline _LIBCPP_INLINE_VISIBILITY
1137void
1138atomic_store_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1139{
1140    __o->store(__d, __m);
1141}
1142
1143// atomic_load
1144
1145template <class _Tp>
1146inline _LIBCPP_INLINE_VISIBILITY
1147_Tp
1148atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
1149{
1150    return __o->load();
1151}
1152
1153template <class _Tp>
1154inline _LIBCPP_INLINE_VISIBILITY
1155_Tp
1156atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
1157{
1158    return __o->load();
1159}
1160
1161// atomic_load_explicit
1162
1163template <class _Tp>
1164inline _LIBCPP_INLINE_VISIBILITY
1165_Tp
1166atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1167{
1168    return __o->load(__m);
1169}
1170
1171template <class _Tp>
1172inline _LIBCPP_INLINE_VISIBILITY
1173_Tp
1174atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1175{
1176    return __o->load(__m);
1177}
1178
1179// atomic_exchange
1180
1181template <class _Tp>
1182inline _LIBCPP_INLINE_VISIBILITY
1183_Tp
1184atomic_exchange(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1185{
1186    return __o->exchange(__d);
1187}
1188
1189template <class _Tp>
1190inline _LIBCPP_INLINE_VISIBILITY
1191_Tp
1192atomic_exchange(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1193{
1194    return __o->exchange(__d);
1195}
1196
1197// atomic_exchange_explicit
1198
1199template <class _Tp>
1200inline _LIBCPP_INLINE_VISIBILITY
1201_Tp
1202atomic_exchange_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1203{
1204    return __o->exchange(__d, __m);
1205}
1206
1207template <class _Tp>
1208inline _LIBCPP_INLINE_VISIBILITY
1209_Tp
1210atomic_exchange_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1211{
1212    return __o->exchange(__d, __m);
1213}
1214
1215// atomic_compare_exchange_weak
1216
1217template <class _Tp>
1218inline _LIBCPP_INLINE_VISIBILITY
1219bool
1220atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1221{
1222    return __o->compare_exchange_weak(*__e, __d);
1223}
1224
1225template <class _Tp>
1226inline _LIBCPP_INLINE_VISIBILITY
1227bool
1228atomic_compare_exchange_weak(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1229{
1230    return __o->compare_exchange_weak(*__e, __d);
1231}
1232
1233// atomic_compare_exchange_strong
1234
1235template <class _Tp>
1236inline _LIBCPP_INLINE_VISIBILITY
1237bool
1238atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1239{
1240    return __o->compare_exchange_strong(*__e, __d);
1241}
1242
1243template <class _Tp>
1244inline _LIBCPP_INLINE_VISIBILITY
1245bool
1246atomic_compare_exchange_strong(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1247{
1248    return __o->compare_exchange_strong(*__e, __d);
1249}
1250
1251// atomic_compare_exchange_weak_explicit
1252
1253template <class _Tp>
1254inline _LIBCPP_INLINE_VISIBILITY
1255bool
1256atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, _Tp* __e,
1257                                      _Tp __d,
1258                                      memory_order __s, memory_order __f) _NOEXCEPT
1259{
1260    return __o->compare_exchange_weak(*__e, __d, __s, __f);
1261}
1262
1263template <class _Tp>
1264inline _LIBCPP_INLINE_VISIBILITY
1265bool
1266atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, _Tp* __e, _Tp __d,
1267                                      memory_order __s, memory_order __f) _NOEXCEPT
1268{
1269    return __o->compare_exchange_weak(*__e, __d, __s, __f);
1270}
1271
1272// atomic_compare_exchange_strong_explicit
1273
1274template <class _Tp>
1275inline _LIBCPP_INLINE_VISIBILITY
1276bool
1277atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
1278                                        _Tp* __e, _Tp __d,
1279                                        memory_order __s, memory_order __f) _NOEXCEPT
1280{
1281    return __o->compare_exchange_strong(*__e, __d, __s, __f);
1282}
1283
1284template <class _Tp>
1285inline _LIBCPP_INLINE_VISIBILITY
1286bool
1287atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, _Tp* __e,
1288                                        _Tp __d,
1289                                        memory_order __s, memory_order __f) _NOEXCEPT
1290{
1291    return __o->compare_exchange_strong(*__e, __d, __s, __f);
1292}
1293
1294// atomic_fetch_add
1295
1296template <class _Tp>
1297inline _LIBCPP_INLINE_VISIBILITY
1298typename enable_if
1299<
1300    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1301    _Tp
1302>::type
1303atomic_fetch_add(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1304{
1305    return __o->fetch_add(__op);
1306}
1307
1308template <class _Tp>
1309inline _LIBCPP_INLINE_VISIBILITY
1310typename enable_if
1311<
1312    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1313    _Tp
1314>::type
1315atomic_fetch_add(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1316{
1317    return __o->fetch_add(__op);
1318}
1319
1320template <class _Tp>
1321inline _LIBCPP_INLINE_VISIBILITY
1322_Tp*
1323atomic_fetch_add(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1324{
1325    return __o->fetch_add(__op);
1326}
1327
1328template <class _Tp>
1329inline _LIBCPP_INLINE_VISIBILITY
1330_Tp*
1331atomic_fetch_add(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
1332{
1333    return __o->fetch_add(__op);
1334}
1335
1336// atomic_fetch_add_explicit
1337
1338template <class _Tp>
1339inline _LIBCPP_INLINE_VISIBILITY
1340typename enable_if
1341<
1342    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1343    _Tp
1344>::type
1345atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1346{
1347    return __o->fetch_add(__op, __m);
1348}
1349
1350template <class _Tp>
1351inline _LIBCPP_INLINE_VISIBILITY
1352typename enable_if
1353<
1354    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1355    _Tp
1356>::type
1357atomic_fetch_add_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1358{
1359    return __o->fetch_add(__op, __m);
1360}
1361
1362template <class _Tp>
1363inline _LIBCPP_INLINE_VISIBILITY
1364_Tp*
1365atomic_fetch_add_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
                          memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

// atomic_fetch_sub

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

// atomic_fetch_sub_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
                          memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

// atomic_fetch_and

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_and(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_and(__op);
}

// atomic_fetch_and_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_and(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_and(__op, __m);
}

// atomic_fetch_or

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_or(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_or(__op);
}

// atomic_fetch_or_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_or(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_or(__op, __m);
}

// atomic_fetch_xor

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_xor(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_xor(__op);
}

// atomic_fetch_xor_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_xor(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_xor(__op, __m);
}

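// Example (illustrative sketch, not part of the header's interface): the free
// atomic_fetch_* functions above simply forward to the corresponding member
// functions; the pointer overloads take a ptrdiff_t element count.  The names
// hits, buf, cur and prev below are placeholders for this sketch.
//
//     std::atomic<unsigned> hits(0);
//     std::atomic_fetch_add_explicit(&hits, 1u, std::memory_order_relaxed);
//     std::atomic_fetch_and(&hits, 0x0Fu);             // mask the counter in place
//
//     int buf[8] = {};
//     std::atomic<int*> cur(buf);
//     int* prev = std::atomic_fetch_add(&cur, 4);      // advances cur by 4 elements
//
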
// flag type and operations

typedef struct atomic_flag
{
    _Atomic(bool) __a_;

    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, true, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, true, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {__c11_atomic_store(&__a_, false, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {__c11_atomic_store(&__a_, false, __m);}

    _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
    atomic_flag() _NOEXCEPT = default;
#else
    atomic_flag() _NOEXCEPT : __a_() {}
#endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS

    _LIBCPP_INLINE_VISIBILITY
    atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {}

#ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;
#else  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
private:
    atomic_flag(const atomic_flag&);
    atomic_flag& operator=(const atomic_flag&);
    atomic_flag& operator=(const atomic_flag&) volatile;
#endif  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
} atomic_flag;

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
{
    return __o->test_and_set();
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
{
    return __o->test_and_set();
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    return __o->test_and_set(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    return __o->test_and_set(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
{
    __o->clear();
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
{
    __o->clear();
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    __o->clear(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    __o->clear(__m);
}

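// Example (illustrative sketch, not part of the header's interface):
// atomic_flag is the minimal guaranteed lock-free primitive and is commonly
// used to build a spin lock.  The names spin_mutex, lock and unlock below
// are placeholders for this sketch.
//
//     std::atomic_flag spin_mutex = ATOMIC_FLAG_INIT;
//
//     void lock()
//     {
//         // keep spinning while the previous value was already set
//         while (std::atomic_flag_test_and_set_explicit(&spin_mutex,
//                                                       std::memory_order_acquire))
//             ;
//     }
//
//     void unlock()
//     {
//         std::atomic_flag_clear_explicit(&spin_mutex, std::memory_order_release);
//     }
//
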
// fences

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_thread_fence(memory_order __m) _NOEXCEPT
{
    __c11_atomic_thread_fence(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_signal_fence(memory_order __m) _NOEXCEPT
{
    __c11_atomic_signal_fence(__m);
}

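// Example (illustrative sketch, not part of the header's interface): a
// release fence before a relaxed store pairs with a relaxed load followed by
// an acquire fence in another thread, establishing a happens-before edge.
// The objects data and ready below are placeholders for this sketch.
//
//     int data;                          // plain data, published via 'ready'
//     std::atomic<bool> ready(false);
//
//     // producer thread
//     data = 42;
//     std::atomic_thread_fence(std::memory_order_release);
//     ready.store(true, std::memory_order_relaxed);
//
//     // consumer thread
//     while (!ready.load(std::memory_order_relaxed))
//         ;
//     std::atomic_thread_fence(std::memory_order_acquire);
//     // data is now guaranteed to read 42
//
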
// Atomics for standard typedef types

typedef atomic<bool>               atomic_bool;
typedef atomic<char>               atomic_char;
typedef atomic<signed char>        atomic_schar;
typedef atomic<unsigned char>      atomic_uchar;
typedef atomic<short>              atomic_short;
typedef atomic<unsigned short>     atomic_ushort;
typedef atomic<int>                atomic_int;
typedef atomic<unsigned int>       atomic_uint;
typedef atomic<long>               atomic_long;
typedef atomic<unsigned long>      atomic_ulong;
typedef atomic<long long>          atomic_llong;
typedef atomic<unsigned long long> atomic_ullong;
typedef atomic<char16_t>           atomic_char16_t;
typedef atomic<char32_t>           atomic_char32_t;
typedef atomic<wchar_t>            atomic_wchar_t;

typedef atomic<int_least8_t>   atomic_int_least8_t;
typedef atomic<uint_least8_t>  atomic_uint_least8_t;
typedef atomic<int_least16_t>  atomic_int_least16_t;
typedef atomic<uint_least16_t> atomic_uint_least16_t;
typedef atomic<int_least32_t>  atomic_int_least32_t;
typedef atomic<uint_least32_t> atomic_uint_least32_t;
typedef atomic<int_least64_t>  atomic_int_least64_t;
typedef atomic<uint_least64_t> atomic_uint_least64_t;

typedef atomic<int_fast8_t>   atomic_int_fast8_t;
typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
typedef atomic<int_fast16_t>  atomic_int_fast16_t;
typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
typedef atomic<int_fast32_t>  atomic_int_fast32_t;
typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
typedef atomic<int_fast64_t>  atomic_int_fast64_t;
typedef atomic<uint_fast64_t> atomic_uint_fast64_t;

typedef atomic<intptr_t>  atomic_intptr_t;
typedef atomic<uintptr_t> atomic_uintptr_t;
typedef atomic<size_t>    atomic_size_t;
typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
typedef atomic<intmax_t>  atomic_intmax_t;
typedef atomic<uintmax_t> atomic_uintmax_t;

#define ATOMIC_FLAG_INIT {false}
#define ATOMIC_VAR_INIT(__v) {__v}

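// Example (illustrative sketch, not part of the header's interface): the
// initialization macros give atomic_flag and atomic<T> objects a definite
// starting value, which is useful for objects with static storage duration.
// The names guard and counter below are placeholders for this sketch.
//
//     std::atomic_flag guard   = ATOMIC_FLAG_INIT;      // starts cleared
//     std::atomic<int> counter = ATOMIC_VAR_INIT(0);    // starts at 0
//
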
// lock-free property

#define ATOMIC_BOOL_LOCK_FREE      __GCC_ATOMIC_BOOL_LOCK_FREE
#define ATOMIC_CHAR_LOCK_FREE      __GCC_ATOMIC_CHAR_LOCK_FREE
#define ATOMIC_CHAR16_T_LOCK_FREE  __GCC_ATOMIC_CHAR16_T_LOCK_FREE
#define ATOMIC_CHAR32_T_LOCK_FREE  __GCC_ATOMIC_CHAR32_T_LOCK_FREE
#define ATOMIC_WCHAR_T_LOCK_FREE   __GCC_ATOMIC_WCHAR_T_LOCK_FREE
#define ATOMIC_SHORT_LOCK_FREE     __GCC_ATOMIC_SHORT_LOCK_FREE
#define ATOMIC_INT_LOCK_FREE       __GCC_ATOMIC_INT_LOCK_FREE
#define ATOMIC_LONG_LOCK_FREE      __GCC_ATOMIC_LONG_LOCK_FREE
#define ATOMIC_LLONG_LOCK_FREE     __GCC_ATOMIC_LLONG_LOCK_FREE
#define ATOMIC_POINTER_LOCK_FREE   __GCC_ATOMIC_POINTER_LOCK_FREE
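
// Example (illustrative sketch, not part of the header's interface): each
// *_LOCK_FREE macro evaluates to 0 (never lock-free), 1 (sometimes
// lock-free) or 2 (always lock-free) and can be tested at compile time,
// while is_lock_free() reports the property for a particular object at run
// time.  The names big and lf below are placeholders for this sketch.
//
//     #if ATOMIC_INT_LOCK_FREE == 2
//     // atomic<int> is always lock-free on this target
//     #endif
//
//     std::atomic<long long> big(0);
//     bool lf = big.is_lock_free();     // may be false on some targets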

#endif  //  !__has_feature(cxx_atomic)

_LIBCPP_END_NAMESPACE_STD

#endif  // !_LIBCPP_HAS_NO_THREADS

#endif  // _LIBCPP_ATOMIC