// -*- C++ -*-
//===--------------------------- atomic -----------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

#ifndef _LIBCPP_ATOMIC
#define _LIBCPP_ATOMIC

/*
    atomic synopsis

namespace std
{

// order and consistency

typedef enum memory_order
{
    memory_order_relaxed,
    memory_order_consume,  // load-consume
    memory_order_acquire,  // load-acquire
    memory_order_release,  // store-release
    memory_order_acq_rel,  // store-release load-acquire
    memory_order_seq_cst   // store-release load-acquire
} memory_order;

template <class T> T kill_dependency(T y) noexcept;

// lock-free property

#define ATOMIC_BOOL_LOCK_FREE unspecified
#define ATOMIC_CHAR_LOCK_FREE unspecified
#define ATOMIC_CHAR16_T_LOCK_FREE unspecified
#define ATOMIC_CHAR32_T_LOCK_FREE unspecified
#define ATOMIC_WCHAR_T_LOCK_FREE unspecified
#define ATOMIC_SHORT_LOCK_FREE unspecified
#define ATOMIC_INT_LOCK_FREE unspecified
#define ATOMIC_LONG_LOCK_FREE unspecified
#define ATOMIC_LLONG_LOCK_FREE unspecified
#define ATOMIC_POINTER_LOCK_FREE unspecified

// flag type and operations

typedef struct atomic_flag
{
    bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
    bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
    void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
    void clear(memory_order m = memory_order_seq_cst) noexcept;
    atomic_flag()  noexcept = default;
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;
} atomic_flag;

bool
    atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept;

bool
    atomic_flag_test_and_set(atomic_flag* obj) noexcept;

bool
    atomic_flag_test_and_set_explicit(volatile atomic_flag* obj,
                                      memory_order m) noexcept;

bool
    atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;

void
    atomic_flag_clear(volatile atomic_flag* obj) noexcept;

void
    atomic_flag_clear(atomic_flag* obj) noexcept;

void
    atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;

void
    atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;

#define ATOMIC_FLAG_INIT see below
#define ATOMIC_VAR_INIT(value) see below

template <class T>
struct atomic
{
    bool is_lock_free() const volatile noexcept;
    bool is_lock_free() const noexcept;
    void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
    T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
    T load(memory_order m = memory_order_seq_cst) const noexcept;
    operator T() const volatile noexcept;
    operator T() const noexcept;
    T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_weak(T& expc, T desr,
                               memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
    bool compare_exchange_strong(T& expc, T desr,
                                 memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_strong(T& expc, T desr,
                                 memory_order s, memory_order f) noexcept;
    bool compare_exchange_weak(T& expc, T desr,
                               memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_weak(T& expc, T desr,
                               memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_strong(T& expc, T desr,
                                memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_strong(T& expc, T desr,
                                 memory_order m = memory_order_seq_cst) noexcept;

    atomic() noexcept = default;
    constexpr atomic(T desr) noexcept;
    atomic(const atomic&) = delete;
    atomic& operator=(const atomic&) = delete;
    atomic& operator=(const atomic&) volatile = delete;
    T operator=(T) volatile noexcept;
    T operator=(T) noexcept;
};

template <>
struct atomic<integral>
{
    bool is_lock_free() const volatile noexcept;
    bool is_lock_free() const noexcept;
    void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
    integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
    integral load(memory_order m = memory_order_seq_cst) const noexcept;
    operator integral() const volatile noexcept;
    operator integral() const noexcept;
    integral exchange(integral desr,
                      memory_order m = memory_order_seq_cst) volatile noexcept;
    integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_weak(integral& expc, integral desr,
                               memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_weak(integral& expc, integral desr,
                               memory_order s, memory_order f) noexcept;
    bool compare_exchange_strong(integral& expc, integral desr,
                                 memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_strong(integral& expc, integral desr,
                                 memory_order s, memory_order f) noexcept;
    bool compare_exchange_weak(integral& expc, integral desr,
                               memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_weak(integral& expc, integral desr,
                               memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_strong(integral& expc, integral desr,
                                memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_strong(integral& expc, integral desr,
                                 memory_order m = memory_order_seq_cst) noexcept;

    integral
        fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
    integral
        fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
    integral
        fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
    integral
        fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
    integral
        fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;

    atomic() noexcept = default;
    constexpr atomic(integral desr) noexcept;
    atomic(const atomic&) = delete;
    atomic& operator=(const atomic&) = delete;
    atomic& operator=(const atomic&) volatile = delete;
    integral operator=(integral desr) volatile noexcept;
    integral operator=(integral desr) noexcept;

    integral operator++(int) volatile noexcept;
    integral operator++(int) noexcept;
    integral operator--(int) volatile noexcept;
    integral operator--(int) noexcept;
    integral operator++() volatile noexcept;
    integral operator++() noexcept;
    integral operator--() volatile noexcept;
    integral operator--() noexcept;
    integral operator+=(integral op) volatile noexcept;
    integral operator+=(integral op) noexcept;
    integral operator-=(integral op) volatile noexcept;
    integral operator-=(integral op) noexcept;
    integral operator&=(integral op) volatile noexcept;
    integral operator&=(integral op) noexcept;
    integral operator|=(integral op) volatile noexcept;
    integral operator|=(integral op) noexcept;
    integral operator^=(integral op) volatile noexcept;
    integral operator^=(integral op) noexcept;
};

template <class T>
struct atomic<T*>
{
    bool is_lock_free() const volatile noexcept;
    bool is_lock_free() const noexcept;
    void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
    T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
    T* load(memory_order m = memory_order_seq_cst) const noexcept;
    operator T*() const volatile noexcept;
    operator T*() const noexcept;
    T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_weak(T*& expc, T* desr,
                               memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_weak(T*& expc, T* desr,
                               memory_order s, memory_order f) noexcept;
    bool compare_exchange_strong(T*& expc, T* desr,
                                 memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_strong(T*& expc, T* desr,
                                 memory_order s, memory_order f) noexcept;
    bool compare_exchange_weak(T*& expc, T* desr,
                               memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_weak(T*& expc, T* desr,
                               memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_strong(T*& expc, T* desr,
                                memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_strong(T*& expc, T* desr,
                                 memory_order m = memory_order_seq_cst) noexcept;
    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;

    atomic() noexcept = default;
    constexpr atomic(T* desr) noexcept;
    atomic(const atomic&) = delete;
    atomic& operator=(const atomic&) = delete;
    atomic& operator=(const atomic&) volatile = delete;

    T* operator=(T*) volatile noexcept;
    T* operator=(T*) noexcept;
    T* operator++(int) volatile noexcept;
    T* operator++(int) noexcept;
    T* operator--(int) volatile noexcept;
    T* operator--(int) noexcept;
    T* operator++() volatile noexcept;
    T* operator++() noexcept;
    T* operator--() volatile noexcept;
    T* operator--() noexcept;
    T* operator+=(ptrdiff_t op) volatile noexcept;
    T* operator+=(ptrdiff_t op) noexcept;
    T* operator-=(ptrdiff_t op) volatile noexcept;
    T* operator-=(ptrdiff_t op) noexcept;
};


template <class T>
    bool
    atomic_is_lock_free(const volatile atomic<T>* obj) noexcept;

template <class T>
    bool
    atomic_is_lock_free(const atomic<T>* obj) noexcept;

template <class T>
    void
    atomic_init(volatile atomic<T>* obj, T desr) noexcept;

template <class T>
    void
    atomic_init(atomic<T>* obj, T desr) noexcept;

template <class T>
    void
    atomic_store(volatile atomic<T>* obj, T desr) noexcept;

template <class T>
    void
    atomic_store(atomic<T>* obj, T desr) noexcept;

template <class T>
    void
    atomic_store_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;

template <class T>
    void
    atomic_store_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;

template <class T>
    T
    atomic_load(const volatile atomic<T>* obj) noexcept;

template <class T>
    T
    atomic_load(const atomic<T>* obj) noexcept;

template <class T>
    T
    atomic_load_explicit(const volatile atomic<T>* obj, memory_order m) noexcept;

template <class T>
    T
    atomic_load_explicit(const atomic<T>* obj, memory_order m) noexcept;

template <class T>
    T
    atomic_exchange(volatile atomic<T>* obj, T desr) noexcept;

template <class T>
    T
    atomic_exchange(atomic<T>* obj, T desr) noexcept;

template <class T>
    T
    atomic_exchange_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;

template <class T>
    T
    atomic_exchange_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;

template <class T>
    bool
    atomic_compare_exchange_weak(volatile atomic<T>* obj, T* expc, T desr) noexcept;

template <class T>
    bool
    atomic_compare_exchange_weak(atomic<T>* obj, T* expc, T desr) noexcept;

template <class T>
    bool
    atomic_compare_exchange_strong(volatile atomic<T>* obj, T* expc, T desr) noexcept;

template <class T>
    bool
    atomic_compare_exchange_strong(atomic<T>* obj, T* expc, T desr) noexcept;

template <class T>
    bool
    atomic_compare_exchange_weak_explicit(volatile atomic<T>* obj, T* expc,
                                          T desr,
                                          memory_order s, memory_order f) noexcept;

template <class T>
    bool
    atomic_compare_exchange_weak_explicit(atomic<T>* obj, T* expc, T desr,
                                          memory_order s, memory_order f) noexcept;

template <class T>
    bool
    atomic_compare_exchange_strong_explicit(volatile atomic<T>* obj,
                                            T* expc, T desr,
                                            memory_order s, memory_order f) noexcept;

template <class T>
    bool
    atomic_compare_exchange_strong_explicit(atomic<T>* obj, T* expc,
                                            T desr,
                                            memory_order s, memory_order f) noexcept;

template <class Integral>
    Integral
    atomic_fetch_add(volatile atomic<Integral>* obj, Integral op) noexcept;

template <class Integral>
    Integral
    atomic_fetch_add(atomic<Integral>* obj, Integral op) noexcept;

template <class Integral>
    Integral
    atomic_fetch_add_explicit(volatile atomic<Integral>* obj, Integral op,
                              memory_order m) noexcept;
template <class Integral>
    Integral
    atomic_fetch_add_explicit(atomic<Integral>* obj, Integral op,
                              memory_order m) noexcept;
template <class Integral>
    Integral
    atomic_fetch_sub(volatile atomic<Integral>* obj, Integral op) noexcept;

template <class Integral>
    Integral
    atomic_fetch_sub(atomic<Integral>* obj, Integral op) noexcept;

template <class Integral>
    Integral
    atomic_fetch_sub_explicit(volatile atomic<Integral>* obj, Integral op,
                              memory_order m) noexcept;
template <class Integral>
    Integral
    atomic_fetch_sub_explicit(atomic<Integral>* obj, Integral op,
                              memory_order m) noexcept;
template <class Integral>
    Integral
    atomic_fetch_and(volatile atomic<Integral>* obj, Integral op) noexcept;

template <class Integral>
    Integral
    atomic_fetch_and(atomic<Integral>* obj, Integral op) noexcept;

template <class Integral>
    Integral
    atomic_fetch_and_explicit(volatile atomic<Integral>* obj, Integral op,
                              memory_order m) noexcept;
template <class Integral>
    Integral
    atomic_fetch_and_explicit(atomic<Integral>* obj, Integral op,
                              memory_order m) noexcept;
template <class Integral>
    Integral
    atomic_fetch_or(volatile atomic<Integral>* obj, Integral op) noexcept;

template <class Integral>
    Integral
    atomic_fetch_or(atomic<Integral>* obj, Integral op) noexcept;

template <class Integral>
    Integral
    atomic_fetch_or_explicit(volatile atomic<Integral>* obj, Integral op,
                             memory_order m) noexcept;
template <class Integral>
    Integral
    atomic_fetch_or_explicit(atomic<Integral>* obj, Integral op,
                             memory_order m) noexcept;
template <class Integral>
    Integral
    atomic_fetch_xor(volatile atomic<Integral>* obj, Integral op) noexcept;

template <class Integral>
    Integral
    atomic_fetch_xor(atomic<Integral>* obj, Integral op) noexcept;

template <class Integral>
    Integral
    atomic_fetch_xor_explicit(volatile atomic<Integral>* obj, Integral op,
                              memory_order m) noexcept;
template <class Integral>
    Integral
    atomic_fetch_xor_explicit(atomic<Integral>* obj, Integral op,
                              memory_order m) noexcept;

template <class T>
    T*
    atomic_fetch_add(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;

template <class T>
    T*
    atomic_fetch_add(atomic<T*>* obj, ptrdiff_t op) noexcept;

template <class T>
    T*
    atomic_fetch_add_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
                              memory_order m) noexcept;
template <class T>
    T*
    atomic_fetch_add_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;

template <class T>
    T*
    atomic_fetch_sub(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;

template <class T>
    T*
    atomic_fetch_sub(atomic<T*>* obj, ptrdiff_t op) noexcept;

template <class T>
    T*
    atomic_fetch_sub_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
                              memory_order m) noexcept;
template <class T>
    T*
    atomic_fetch_sub_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;

// Atomics for standard typedef types

typedef atomic<bool>               atomic_bool;
typedef atomic<char>               atomic_char;
typedef atomic<signed char>        atomic_schar;
typedef atomic<unsigned char>      atomic_uchar;
typedef atomic<short>              atomic_short;
typedef atomic<unsigned short>     atomic_ushort;
typedef atomic<int>                atomic_int;
typedef atomic<unsigned int>       atomic_uint;
typedef atomic<long>               atomic_long;
typedef atomic<unsigned long>      atomic_ulong;
typedef atomic<long long>          atomic_llong;
typedef atomic<unsigned long long> atomic_ullong;
typedef atomic<char16_t>           atomic_char16_t;
typedef atomic<char32_t>           atomic_char32_t;
typedef atomic<wchar_t>            atomic_wchar_t;

typedef atomic<int_least8_t>   atomic_int_least8_t;
typedef atomic<uint_least8_t>  atomic_uint_least8_t;
typedef atomic<int_least16_t>  atomic_int_least16_t;
typedef atomic<uint_least16_t> atomic_uint_least16_t;
typedef atomic<int_least32_t>  atomic_int_least32_t;
typedef atomic<uint_least32_t> atomic_uint_least32_t;
typedef atomic<int_least64_t>  atomic_int_least64_t;
typedef atomic<uint_least64_t> atomic_uint_least64_t;

typedef atomic<int_fast8_t>   atomic_int_fast8_t;
typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
typedef atomic<int_fast16_t>  atomic_int_fast16_t;
typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
typedef atomic<int_fast32_t>  atomic_int_fast32_t;
typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
typedef atomic<int_fast64_t>  atomic_int_fast64_t;
typedef atomic<uint_fast64_t> atomic_uint_fast64_t;

typedef atomic<intptr_t>  atomic_intptr_t;
typedef atomic<uintptr_t> atomic_uintptr_t;
typedef atomic<size_t>    atomic_size_t;
typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
typedef atomic<intmax_t>  atomic_intmax_t;
typedef atomic<uintmax_t> atomic_uintmax_t;

// fences

void atomic_thread_fence(memory_order m) noexcept;
void atomic_signal_fence(memory_order m) noexcept;

}  // std

*/
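
// Illustrative usage only (not part of the synopsis above): a minimal sketch
// of a spin lock built from the atomic_flag interface, assuming a C++11
// translation unit that includes <atomic>:
//
//     std::atomic_flag flag = ATOMIC_FLAG_INIT;
//
//     void lock()   { while (flag.test_and_set(std::memory_order_acquire)) {} }
//     void unlock() { flag.clear(std::memory_order_release); }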

#include <__config>
#include <cstddef>
#include <cstdint>
#include <type_traits>

#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
#pragma GCC system_header
#endif

_LIBCPP_BEGIN_NAMESPACE_STD

#if !__has_feature(cxx_atomic)
#error <atomic> is not implemented
#else

typedef enum memory_order
{
    memory_order_relaxed, memory_order_consume, memory_order_acquire,
    memory_order_release, memory_order_acq_rel, memory_order_seq_cst
} memory_order;

#if !defined(__clang__)

namespace __gcc_atomic {
template <typename T>
struct __gcc_atomic_t {
  __gcc_atomic_t() _NOEXCEPT {}
  explicit __gcc_atomic_t(T value) _NOEXCEPT : __a_value(value) {}
  T __a_value;
};
#define _Atomic(x) __gcc_atomic::__gcc_atomic_t<x>

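// __can_assign<Ptr, T>::value reports whether "Ptr()->__a_value = T()" is a
// well-formed assignment, detected with the sizeof/SFINAE overload pair below.
// __c11_atomic_init (further down) uses it to choose between plain member
// assignment and a byte-by-byte copy for volatile objects.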
template <typename T> T __create();

template <typename __Tp, typename __Td>
typename enable_if<sizeof(__Tp()->__a_value = __create<__Td>()), char>::type
    __test_atomic_assignable(int);
template <typename T, typename U>
__two __test_atomic_assignable(...);

template <typename __Tp, typename __Td>
struct __can_assign {
  static const bool value =
      sizeof(__test_atomic_assignable<__Tp, __Td>(1)) == sizeof(char);
};

static inline int __to_gcc_order(memory_order __order) {
  switch (__order) {
    case memory_order_relaxed:
      return __ATOMIC_RELAXED;
    case memory_order_consume:
      return __ATOMIC_CONSUME;
    case memory_order_acquire:
      return __ATOMIC_ACQUIRE;
    case memory_order_release:
      return __ATOMIC_RELEASE;
    case memory_order_acq_rel:
      return __ATOMIC_ACQ_REL;
    case memory_order_seq_cst:
      return __ATOMIC_SEQ_CST;
  }
}

} // namespace __gcc_atomic

template <typename _Tp>
static inline
typename enable_if<
    __gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value>::type
__c11_atomic_init(volatile _Atomic(_Tp)* __a,  _Tp __val) {
  __a->__a_value = __val;
}

template <typename _Tp>
static inline
typename enable_if<
    !__gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value &&
     __gcc_atomic::__can_assign<         _Atomic(_Tp)*, _Tp>::value>::type
__c11_atomic_init(volatile _Atomic(_Tp)* __a,  _Tp __val) {
  // [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
  // the default operator= in an object is not volatile, a byte-by-byte copy
  // is required.
  volatile char* to = reinterpret_cast<volatile char*>(&__a->__a_value);
  volatile char* end = to + sizeof(_Tp);
  char* from = reinterpret_cast<char*>(&__val);
  while (to != end) {
    *to++ = *from++;
  }
}

template <typename _Tp>
static inline void __c11_atomic_init(_Atomic(_Tp)* __a,  _Tp __val) {
  __a->__a_value = __val;
}

static inline void __c11_atomic_thread_fence(memory_order __order) {
  __atomic_thread_fence(__gcc_atomic::__to_gcc_order(__order));
}

static inline void __c11_atomic_signal_fence(memory_order __order) {
  __atomic_signal_fence(__gcc_atomic::__to_gcc_order(__order));
}

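// Forwards to GCC's __atomic_is_lock_free; the null second argument asks
// about objects of this size at typical alignment rather than a specific
// object.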
static inline bool __c11_atomic_is_lock_free(size_t __size) {
  return __atomic_is_lock_free(__size, 0);
}

template <typename _Tp>
static inline void __c11_atomic_store(volatile _Atomic(_Tp)* __a,  _Tp __val,
                                      memory_order __order) {
  return __atomic_store(&__a->__a_value, &__val,
                        __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline void __c11_atomic_store(_Atomic(_Tp)* __a,  _Tp __val,
                                      memory_order __order) {
  return __atomic_store(&__a->__a_value, &__val,
                        __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_load(volatile _Atomic(_Tp)* __a,
                                    memory_order __order) {
  _Tp __ret;
  __atomic_load(&__a->__a_value, &__ret,
                __gcc_atomic::__to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
static inline _Tp __c11_atomic_load(_Atomic(_Tp)* __a, memory_order __order) {
  _Tp __ret;
  __atomic_load(&__a->__a_value, &__ret,
                __gcc_atomic::__to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
static inline _Tp __c11_atomic_exchange(volatile _Atomic(_Tp)* __a,
                                        _Tp __value, memory_order __order) {
  _Tp __ret;
  __atomic_exchange(&__a->__a_value, &__value, &__ret,
                    __gcc_atomic::__to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
static inline _Tp __c11_atomic_exchange(_Atomic(_Tp)* __a, _Tp __value,
                                        memory_order __order) {
  _Tp __ret;
  __atomic_exchange(&__a->__a_value, &__value, &__ret,
                    __gcc_atomic::__to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
static inline bool __c11_atomic_compare_exchange_strong(
    volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
    memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   false,
                                   __gcc_atomic::__to_gcc_order(__success),
                                   __gcc_atomic::__to_gcc_order(__failure));
}

template <typename _Tp>
static inline bool __c11_atomic_compare_exchange_strong(
    _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   false,
                                   __gcc_atomic::__to_gcc_order(__success),
                                   __gcc_atomic::__to_gcc_order(__failure));
}

template <typename _Tp>
static inline bool __c11_atomic_compare_exchange_weak(
    volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
    memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   true,
                                   __gcc_atomic::__to_gcc_order(__success),
                                   __gcc_atomic::__to_gcc_order(__failure));
}

template <typename _Tp>
static inline bool __c11_atomic_compare_exchange_weak(
    _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   true,
                                   __gcc_atomic::__to_gcc_order(__success),
                                   __gcc_atomic::__to_gcc_order(__failure));
}

template <typename _Tp>
struct __skip_amt { enum {value = 1}; };

template <typename _Tp>
struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; };

// FIXME: Haven't figured out what the spec says about using arrays with
// atomic_fetch_add. Force a failure rather than creating bad behavior.
template <typename _Tp>
struct __skip_amt<_Tp[]> { };
template <typename _Tp, int n>
struct __skip_amt<_Tp[n]> { };
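
// __skip_amt gives the multiplier applied to the delta in the fetch_add /
// fetch_sub helpers below: 1 for arithmetic types, sizeof(_Tp) for _Tp*,
// because the underlying builtins add raw bytes to a pointer operand. For
// example, fetch_add(1) on an atomic int* advances the stored pointer by one
// int, matching ordinary pointer arithmetic.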

template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_add(volatile _Atomic(_Tp)* __a,
                                         _Td __delta, memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_add(_Atomic(_Tp)* __a, _Td __delta,
                                         memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_sub(volatile _Atomic(_Tp)* __a,
                                         _Td __delta, memory_order __order) {
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_sub(_Atomic(_Tp)* __a, _Td __delta,
                                         memory_order __order) {
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_fetch_and(volatile _Atomic(_Tp)* __a,
                                         _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_fetch_and(_Atomic(_Tp)* __a,
                                         _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_fetch_or(volatile _Atomic(_Tp)* __a,
                                        _Tp __pattern, memory_order __order) {
  return __atomic_fetch_or(&__a->__a_value, __pattern,
                           __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_fetch_or(_Atomic(_Tp)* __a, _Tp __pattern,
                                        memory_order __order) {
  return __atomic_fetch_or(&__a->__a_value, __pattern,
                           __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_fetch_xor(volatile _Atomic(_Tp)* __a,
                                         _Tp __pattern, memory_order __order) {
  return __atomic_fetch_xor(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_fetch_xor(_Atomic(_Tp)* __a, _Tp __pattern,
                                         memory_order __order) {
  return __atomic_fetch_xor(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
}

#endif // !__clang__

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
kill_dependency(_Tp __y) _NOEXCEPT
{
    return __y;
}

// general atomic<T>

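// The second template parameter selects the base: this primary (false) case
// provides only the core operations; the true specialization further below
// adds the fetch_* members and arithmetic operators for non-bool integral
// types.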
template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
struct __atomic_base  // false
{
    mutable _Atomic(_Tp) __a_;

    _LIBCPP_INLINE_VISIBILITY
    bool is_lock_free() const volatile _NOEXCEPT
        {return __c11_atomic_is_lock_free(sizeof(_Tp));}
    _LIBCPP_INLINE_VISIBILITY
    bool is_lock_free() const _NOEXCEPT
        {return __c11_atomic_is_lock_free(sizeof(_Tp));}
    _LIBCPP_INLINE_VISIBILITY
    void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {__c11_atomic_store(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {__c11_atomic_store(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
        {return __c11_atomic_load(&__a_, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
        {return __c11_atomic_load(&__a_, __m);}
    _LIBCPP_INLINE_VISIBILITY
    operator _Tp() const volatile _NOEXCEPT {return load();}
    _LIBCPP_INLINE_VISIBILITY
    operator _Tp() const _NOEXCEPT          {return load();}
    _LIBCPP_INLINE_VISIBILITY
    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __s, memory_order __f) volatile _NOEXCEPT
        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __s, memory_order __f) _NOEXCEPT
        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __s, memory_order __f) volatile _NOEXCEPT
        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __s, memory_order __f) _NOEXCEPT
        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}

    _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
    __atomic_base() _NOEXCEPT = default;
#else
    __atomic_base() _NOEXCEPT : __a_() {}
#endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS

    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}
#ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
    __atomic_base(const __atomic_base&) = delete;
    __atomic_base& operator=(const __atomic_base&) = delete;
    __atomic_base& operator=(const __atomic_base&) volatile = delete;
#else  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
private:
    __atomic_base(const __atomic_base&);
    __atomic_base& operator=(const __atomic_base&);
    __atomic_base& operator=(const __atomic_base&) volatile;
#endif  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
};

// atomic<Integral>

template <class _Tp>
struct __atomic_base<_Tp, true>
    : public __atomic_base<_Tp, false>
{
    typedef __atomic_base<_Tp, false> __base;
    _LIBCPP_INLINE_VISIBILITY
    __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}

    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}

    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++(int) volatile _NOEXCEPT      {return fetch_add(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++(int) _NOEXCEPT               {return fetch_add(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--(int) volatile _NOEXCEPT      {return fetch_sub(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--(int) _NOEXCEPT               {return fetch_sub(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++() volatile _NOEXCEPT         {return fetch_add(_Tp(1)) + _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++() _NOEXCEPT                  {return fetch_add(_Tp(1)) + _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--() volatile _NOEXCEPT         {return fetch_sub(_Tp(1)) - _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--() _NOEXCEPT                  {return fetch_sub(_Tp(1)) - _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator+=(_Tp __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator-=(_Tp __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator&=(_Tp __op) _NOEXCEPT          {return fetch_and(__op) & __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator|=(_Tp __op) _NOEXCEPT          {return fetch_or(__op) | __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator^=(_Tp __op) _NOEXCEPT          {return fetch_xor(__op) ^ __op;}
};

// atomic<T>

template <class _Tp>
struct atomic
    : public __atomic_base<_Tp>
{
    typedef __atomic_base<_Tp> __base;
    _LIBCPP_INLINE_VISIBILITY
    atomic() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}

    _LIBCPP_INLINE_VISIBILITY
    _Tp operator=(_Tp __d) volatile _NOEXCEPT
        {__base::store(__d); return __d;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator=(_Tp __d) _NOEXCEPT
        {__base::store(__d); return __d;}
};

// atomic<T*>

template <class _Tp>
struct atomic<_Tp*>
    : public __atomic_base<_Tp*>
{
    typedef __atomic_base<_Tp*> __base;
    _LIBCPP_INLINE_VISIBILITY
    atomic() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}

    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
        {__base::store(__d); return __d;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator=(_Tp* __d) _NOEXCEPT
        {__base::store(__d); return __d;}

    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
                                                                        volatile _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
                                                                        volatile _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}

    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++(int) volatile _NOEXCEPT            {return fetch_add(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++(int) _NOEXCEPT                     {return fetch_add(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--(int) volatile _NOEXCEPT            {return fetch_sub(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--(int) _NOEXCEPT                     {return fetch_sub(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++() volatile _NOEXCEPT               {return fetch_add(1) + 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++() _NOEXCEPT                        {return fetch_add(1) + 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--() volatile _NOEXCEPT               {return fetch_sub(1) - 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--() _NOEXCEPT                        {return fetch_sub(1) - 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
};
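
// Illustrative (commented) behaviour of the pointer specialization, assuming
// an atomic<int*> p that currently holds &a[0]:
//
//     int* old = p.fetch_add(2);   // old == &a[0], p now holds &a[2]
//     ++p;                         // p now holds &a[3]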

// atomic_is_lock_free

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->is_lock_free();
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->is_lock_free();
}

// atomic_init

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_init(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __c11_atomic_init(&__o->__a_, __d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_init(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __c11_atomic_init(&__o->__a_, __d);
}

// atomic_store

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __o->store(__d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __o->store(__d);
}

// atomic_store_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
{
    __o->store(__d, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
{
    __o->store(__d, __m);
}

// atomic_load

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->load();
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->load();
}

// atomic_load_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
{
    return __o->load(__m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
{
    return __o->load(__m);
}

// atomic_exchange

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    return __o->exchange(__d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    return __o->exchange(__d);
}

// atomic_exchange_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
{
    return __o->exchange(__d, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
{
    return __o->exchange(__d, __m);
}

// atomic_compare_exchange_weak

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d);
}

// atomic_compare_exchange_strong

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d);
}

// atomic_compare_exchange_weak_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, _Tp* __e,
                                      _Tp __d,
                                      memory_order __s, memory_order __f) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d, __s, __f);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, _Tp* __e, _Tp __d,
                                      memory_order __s, memory_order __f) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d, __s, __f);
}

// atomic_compare_exchange_strong_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
                                        _Tp* __e, _Tp __d,
                                        memory_order __s, memory_order __f) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d, __s, __f);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, _Tp* __e,
                                        _Tp __d,
                                        memory_order __s, memory_order __f) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d, __s, __f);
}

// atomic_fetch_add

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

// atomic_fetch_add_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
                          memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

// atomic_fetch_sub

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

// atomic_fetch_sub_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
                          memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

// atomic_fetch_and

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
1465    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1466    _Tp
1467>::type
1468atomic_fetch_and(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1469{
1470    return __o->fetch_and(__op);
1471}
1472
1473template <class _Tp>
1474inline _LIBCPP_INLINE_VISIBILITY
1475typename enable_if
1476<
1477    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1478    _Tp
1479>::type
1480atomic_fetch_and(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
1481{
1482    return __o->fetch_and(__op);
1483}
1484
1485// atomic_fetch_and_explicit
1486
1487template <class _Tp>
1488inline _LIBCPP_INLINE_VISIBILITY
1489typename enable_if
1490<
1491    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1492    _Tp
1493>::type
1494atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1495{
1496    return __o->fetch_and(__op, __m);
1497}
1498
1499template <class _Tp>
1500inline _LIBCPP_INLINE_VISIBILITY
1501typename enable_if
1502<
1503    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
1504    _Tp
1505>::type
1506atomic_fetch_and_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
1507{
1508    return __o->fetch_and(__op, __m);
1509}

// atomic_fetch_or

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_or(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_or(__op);
}

// atomic_fetch_or_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_or(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_or(__op, __m);
}
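
// Usage sketch (illustrative only, not a normative part of this header; the
// variable names are placeholders). atomic_fetch_or sets bits atomically and
// returns the previous value.
//
//   std::atomic<unsigned> flags(0);
//   unsigned old = std::atomic_fetch_or(&flags, 0x04u);     // sets bit 2
//   std::atomic_fetch_or_explicit(&flags, 0x08u,            // sets bit 3
//                                 std::memory_order_release);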

// atomic_fetch_xor

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_xor(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_xor(__op);
}

// atomic_fetch_xor_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_xor(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_xor(__op, __m);
}
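
// Usage sketch (illustrative only, not a normative part of this header; the
// variable names are placeholders). atomic_fetch_xor toggles bits atomically
// and returns the previous value.
//
//   std::atomic<unsigned> mode(0x01u);
//   unsigned old = std::atomic_fetch_xor(&mode, 0x01u);     // toggles bit 0
//   std::atomic_fetch_xor_explicit(&mode, 0x02u,            // toggles bit 1
//                                  std::memory_order_acq_rel);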

// flag type and operations

typedef struct atomic_flag
{
    _Atomic(bool) __a_;

    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, true, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, true, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {__c11_atomic_store(&__a_, false, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {__c11_atomic_store(&__a_, false, __m);}

    _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS
    atomic_flag() _NOEXCEPT = default;
#else
    atomic_flag() _NOEXCEPT : __a_() {}
#endif // _LIBCPP_HAS_NO_DEFAULTED_FUNCTIONS

    _LIBCPP_INLINE_VISIBILITY
    atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {}

#ifndef _LIBCPP_HAS_NO_DELETED_FUNCTIONS
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;
#else  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
private:
    atomic_flag(const atomic_flag&);
    atomic_flag& operator=(const atomic_flag&);
    atomic_flag& operator=(const atomic_flag&) volatile;
#endif  // _LIBCPP_HAS_NO_DELETED_FUNCTIONS
} atomic_flag;

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
{
    return __o->test_and_set();
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
{
    return __o->test_and_set();
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    return __o->test_and_set(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    return __o->test_and_set(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
{
    __o->clear();
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
{
    __o->clear();
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    __o->clear(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    __o->clear(__m);
}
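
// Usage sketch (illustrative only, not a normative part of this header; the
// names lock/enter/leave are placeholders). atomic_flag operations are
// guaranteed lock-free, so a simple user-code spinlock can be built from
// test_and_set/clear or the free functions above.
//
//   std::atomic_flag lock = ATOMIC_FLAG_INIT;
//
//   void enter() {
//       while (std::atomic_flag_test_and_set_explicit(&lock, std::memory_order_acquire))
//           ;                                         // spin until acquired
//   }
//   void leave() {
//       std::atomic_flag_clear_explicit(&lock, std::memory_order_release);
//   }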

// fences

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_thread_fence(memory_order __m) _NOEXCEPT
{
    __c11_atomic_thread_fence(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_signal_fence(memory_order __m) _NOEXCEPT
{
    __c11_atomic_signal_fence(__m);
}
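
// Usage sketch (illustrative only, not a normative part of this header; data,
// ready and use are placeholder names). atomic_thread_fence orders surrounding
// atomic operations without naming a particular object; atomic_signal_fence
// only constrains the compiler, for ordering against a signal handler on the
// same thread. A typical release/acquire fence pairing from user code:
//
//   int data = 0;
//   std::atomic<bool> ready(false);
//
//   // writer:
//   data = 42;
//   std::atomic_thread_fence(std::memory_order_release);
//   ready.store(true, std::memory_order_relaxed);
//
//   // reader:
//   if (ready.load(std::memory_order_relaxed)) {
//       std::atomic_thread_fence(std::memory_order_acquire);
//       use(data);                                    // sees data == 42
//   }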

// Atomics for standard typedef types

typedef atomic<bool>               atomic_bool;
typedef atomic<char>               atomic_char;
typedef atomic<signed char>        atomic_schar;
typedef atomic<unsigned char>      atomic_uchar;
typedef atomic<short>              atomic_short;
typedef atomic<unsigned short>     atomic_ushort;
typedef atomic<int>                atomic_int;
typedef atomic<unsigned int>       atomic_uint;
typedef atomic<long>               atomic_long;
typedef atomic<unsigned long>      atomic_ulong;
typedef atomic<long long>          atomic_llong;
typedef atomic<unsigned long long> atomic_ullong;
typedef atomic<char16_t>           atomic_char16_t;
typedef atomic<char32_t>           atomic_char32_t;
typedef atomic<wchar_t>            atomic_wchar_t;

typedef atomic<int_least8_t>   atomic_int_least8_t;
typedef atomic<uint_least8_t>  atomic_uint_least8_t;
typedef atomic<int_least16_t>  atomic_int_least16_t;
typedef atomic<uint_least16_t> atomic_uint_least16_t;
typedef atomic<int_least32_t>  atomic_int_least32_t;
typedef atomic<uint_least32_t> atomic_uint_least32_t;
typedef atomic<int_least64_t>  atomic_int_least64_t;
typedef atomic<uint_least64_t> atomic_uint_least64_t;

typedef atomic<int_fast8_t>   atomic_int_fast8_t;
typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
typedef atomic<int_fast16_t>  atomic_int_fast16_t;
typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
typedef atomic<int_fast32_t>  atomic_int_fast32_t;
typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
typedef atomic<int_fast64_t>  atomic_int_fast64_t;
typedef atomic<uint_fast64_t> atomic_uint_fast64_t;

typedef atomic<intptr_t>  atomic_intptr_t;
typedef atomic<uintptr_t> atomic_uintptr_t;
typedef atomic<size_t>    atomic_size_t;
typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
typedef atomic<intmax_t>  atomic_intmax_t;
typedef atomic<uintmax_t> atomic_uintmax_t;

#define ATOMIC_FLAG_INIT {false}
#define ATOMIC_VAR_INIT(__v) {__v}
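
// Usage sketch (illustrative only, not a normative part of this header; guard
// and hits are placeholder names). The initialization macros are intended for
// initializing atomic objects, typically at namespace/static scope:
//
//   std::atomic_flag guard = ATOMIC_FLAG_INIT;        // starts in clear state
//   std::atomic<int> hits  = ATOMIC_VAR_INIT(0);      // expands to {0}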

// lock-free property

#define ATOMIC_BOOL_LOCK_FREE      __GCC_ATOMIC_BOOL_LOCK_FREE
#define ATOMIC_CHAR_LOCK_FREE      __GCC_ATOMIC_CHAR_LOCK_FREE
#define ATOMIC_CHAR16_T_LOCK_FREE  __GCC_ATOMIC_CHAR16_T_LOCK_FREE
#define ATOMIC_CHAR32_T_LOCK_FREE  __GCC_ATOMIC_CHAR32_T_LOCK_FREE
#define ATOMIC_WCHAR_T_LOCK_FREE   __GCC_ATOMIC_WCHAR_T_LOCK_FREE
#define ATOMIC_SHORT_LOCK_FREE     __GCC_ATOMIC_SHORT_LOCK_FREE
#define ATOMIC_INT_LOCK_FREE       __GCC_ATOMIC_INT_LOCK_FREE
#define ATOMIC_LONG_LOCK_FREE      __GCC_ATOMIC_LONG_LOCK_FREE
#define ATOMIC_LLONG_LOCK_FREE     __GCC_ATOMIC_LLONG_LOCK_FREE
#define ATOMIC_POINTER_LOCK_FREE   __GCC_ATOMIC_POINTER_LOCK_FREE
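
// Usage sketch (illustrative only, not a normative part of this header). The
// *_LOCK_FREE macros expand to 0 (never), 1 (sometimes), or 2 (always
// lock-free) and, unlike is_lock_free(), can be tested by the preprocessor:
//
//   #if ATOMIC_INT_LOCK_FREE == 2
//       // std::atomic<int> is always lock-free on this target
//   #endif
//   static_assert(ATOMIC_POINTER_LOCK_FREE >= 1, "expect lock-free pointers");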

#endif  //  !__has_feature(cxx_atomic)

_LIBCPP_END_NAMESPACE_STD

#endif  // _LIBCPP_ATOMIC