1// -*- C++ -*-
2//===--------------------------- atomic -----------------------------------===//
3//
4// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
5// See https://llvm.org/LICENSE.txt for license information.
6// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
7//
8//===----------------------------------------------------------------------===//
9
10#ifndef _LIBCPP_ATOMIC
11#define _LIBCPP_ATOMIC
12
13/*
14    atomic synopsis
15
16namespace std
17{
18
19// feature test macro [version.syn]
20
21#define __cpp_lib_atomic_is_always_lock_free
22#define __cpp_lib_atomic_flag_test
23#define __cpp_lib_atomic_lock_free_type_aliases
24#define __cpp_lib_atomic_wait
25
26 // order and consistency
27
28 enum memory_order: unspecified // enum class in C++20
29 {
30    relaxed,
31    consume, // load-consume
32    acquire, // load-acquire
33    release, // store-release
34    acq_rel, // store-release load-acquire
    seq_cst // store-release load-acquire, plus a single total order
36 };
37
38 inline constexpr auto memory_order_relaxed = memory_order::relaxed;
39 inline constexpr auto memory_order_consume = memory_order::consume;
40 inline constexpr auto memory_order_acquire = memory_order::acquire;
41 inline constexpr auto memory_order_release = memory_order::release;
42 inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
43 inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;
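
// Illustrative sketch (not part of the synopsis): release/acquire hand-off.
// A release store synchronizes-with an acquire load that reads the stored
// value, making the producer's earlier writes visible to the consumer:
//
//     atomic<bool> ready{false};
//     int data = 0;
//     // producer thread
//     data = 42;
//     ready.store(true, memory_order_release);
//     // consumer thread
//     while (!ready.load(memory_order_acquire)) { }
//     assert(data == 42);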
44
45template <class T> T kill_dependency(T y) noexcept;
46
47// lock-free property
48
49#define ATOMIC_BOOL_LOCK_FREE unspecified
50#define ATOMIC_CHAR_LOCK_FREE unspecified
51#define ATOMIC_CHAR8_T_LOCK_FREE unspecified // C++20
52#define ATOMIC_CHAR16_T_LOCK_FREE unspecified
53#define ATOMIC_CHAR32_T_LOCK_FREE unspecified
54#define ATOMIC_WCHAR_T_LOCK_FREE unspecified
55#define ATOMIC_SHORT_LOCK_FREE unspecified
56#define ATOMIC_INT_LOCK_FREE unspecified
57#define ATOMIC_LONG_LOCK_FREE unspecified
58#define ATOMIC_LLONG_LOCK_FREE unspecified
59#define ATOMIC_POINTER_LOCK_FREE unspecified
60
61template <class T>
62struct atomic
63{
64    using value_type = T;
65
66    static constexpr bool is_always_lock_free;
67    bool is_lock_free() const volatile noexcept;
68    bool is_lock_free() const noexcept;
69
70    atomic() noexcept = default;
71    constexpr atomic(T desr) noexcept;
72    atomic(const atomic&) = delete;
73    atomic& operator=(const atomic&) = delete;
74    atomic& operator=(const atomic&) volatile = delete;
75
76    T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
77    T load(memory_order m = memory_order_seq_cst) const noexcept;
78    operator T() const volatile noexcept;
79    operator T() const noexcept;
80    void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
81    void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
82    T operator=(T) volatile noexcept;
83    T operator=(T) noexcept;
84
85    T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
86    T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
87    bool compare_exchange_weak(T& expc, T desr,
88                               memory_order s, memory_order f) volatile noexcept;
89    bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
90    bool compare_exchange_strong(T& expc, T desr,
91                                 memory_order s, memory_order f) volatile noexcept;
92    bool compare_exchange_strong(T& expc, T desr,
93                                 memory_order s, memory_order f) noexcept;
94    bool compare_exchange_weak(T& expc, T desr,
95                               memory_order m = memory_order_seq_cst) volatile noexcept;
96    bool compare_exchange_weak(T& expc, T desr,
97                               memory_order m = memory_order_seq_cst) noexcept;
98    bool compare_exchange_strong(T& expc, T desr,
99                                memory_order m = memory_order_seq_cst) volatile noexcept;
100    bool compare_exchange_strong(T& expc, T desr,
101                                 memory_order m = memory_order_seq_cst) noexcept;
102
103    void wait(T, memory_order = memory_order::seq_cst) const volatile noexcept;
104    void wait(T, memory_order = memory_order::seq_cst) const noexcept;
105    void notify_one() volatile noexcept;
106    void notify_one() noexcept;
107    void notify_all() volatile noexcept;
108    void notify_all() noexcept;
109};
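
// Illustrative sketch (not part of the synopsis): the canonical
// compare_exchange_weak retry loop. On failure, the expected value is updated
// with the value actually observed, so each iteration recomputes from fresh
// data; spurious failures of the weak form are simply absorbed by the retry:
//
//     atomic<int> counter{0};
//     int expected = counter.load(memory_order_relaxed);
//     while (!counter.compare_exchange_weak(expected, expected * 2,
//                                           memory_order_acq_rel,
//                                           memory_order_relaxed)) {
//         // expected now holds the current value of counter; retry
//     }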
110
111template <>
112struct atomic<integral>
113{
114    using value_type = integral;
115    using difference_type = value_type;
116
117    static constexpr bool is_always_lock_free;
118    bool is_lock_free() const volatile noexcept;
119    bool is_lock_free() const noexcept;
120
121    atomic() noexcept = default;
122    constexpr atomic(integral desr) noexcept;
123    atomic(const atomic&) = delete;
124    atomic& operator=(const atomic&) = delete;
125    atomic& operator=(const atomic&) volatile = delete;
126
127    integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
128    integral load(memory_order m = memory_order_seq_cst) const noexcept;
129    operator integral() const volatile noexcept;
130    operator integral() const noexcept;
131    void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
132    void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
133    integral operator=(integral desr) volatile noexcept;
134    integral operator=(integral desr) noexcept;
135
136    integral exchange(integral desr,
137                      memory_order m = memory_order_seq_cst) volatile noexcept;
138    integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
139    bool compare_exchange_weak(integral& expc, integral desr,
140                               memory_order s, memory_order f) volatile noexcept;
141    bool compare_exchange_weak(integral& expc, integral desr,
142                               memory_order s, memory_order f) noexcept;
143    bool compare_exchange_strong(integral& expc, integral desr,
144                                 memory_order s, memory_order f) volatile noexcept;
145    bool compare_exchange_strong(integral& expc, integral desr,
146                                 memory_order s, memory_order f) noexcept;
147    bool compare_exchange_weak(integral& expc, integral desr,
148                               memory_order m = memory_order_seq_cst) volatile noexcept;
149    bool compare_exchange_weak(integral& expc, integral desr,
150                               memory_order m = memory_order_seq_cst) noexcept;
151    bool compare_exchange_strong(integral& expc, integral desr,
152                                memory_order m = memory_order_seq_cst) volatile noexcept;
153    bool compare_exchange_strong(integral& expc, integral desr,
154                                 memory_order m = memory_order_seq_cst) noexcept;
155
156    integral fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
157    integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
158    integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
159    integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
160    integral fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
161    integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
162    integral fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
163    integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
164    integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
165    integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;
166
167    integral operator++(int) volatile noexcept;
168    integral operator++(int) noexcept;
169    integral operator--(int) volatile noexcept;
170    integral operator--(int) noexcept;
171    integral operator++() volatile noexcept;
172    integral operator++() noexcept;
173    integral operator--() volatile noexcept;
174    integral operator--() noexcept;
175    integral operator+=(integral op) volatile noexcept;
176    integral operator+=(integral op) noexcept;
177    integral operator-=(integral op) volatile noexcept;
178    integral operator-=(integral op) noexcept;
179    integral operator&=(integral op) volatile noexcept;
180    integral operator&=(integral op) noexcept;
181    integral operator|=(integral op) volatile noexcept;
182    integral operator|=(integral op) noexcept;
183    integral operator^=(integral op) volatile noexcept;
184    integral operator^=(integral op) noexcept;
185
186    void wait(integral, memory_order = memory_order::seq_cst) const volatile noexcept;
187    void wait(integral, memory_order = memory_order::seq_cst) const noexcept;
188    void notify_one() volatile noexcept;
189    void notify_one() noexcept;
190    void notify_all() volatile noexcept;
191    void notify_all() noexcept;
192};
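
// Illustrative sketch (not part of the synopsis): the integral specialization
// adds read-modify-write arithmetic; a relaxed fetch_add suffices for an event
// counter where only the final total matters:
//
//     atomic<unsigned long> hits{0};
//     hits.fetch_add(1, memory_order_relaxed);  // ++hits would do the same with seq_cst
//     unsigned long total = hits.load();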
193
194template <class T>
195struct atomic<T*>
196{
197    using value_type = T*;
198    using difference_type = ptrdiff_t;
199
200    static constexpr bool is_always_lock_free;
201    bool is_lock_free() const volatile noexcept;
202    bool is_lock_free() const noexcept;
203
204    atomic() noexcept = default;
205    constexpr atomic(T* desr) noexcept;
206    atomic(const atomic&) = delete;
207    atomic& operator=(const atomic&) = delete;
208    atomic& operator=(const atomic&) volatile = delete;
209
210    T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
211    T* load(memory_order m = memory_order_seq_cst) const noexcept;
212    operator T*() const volatile noexcept;
213    operator T*() const noexcept;
214    void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
215    void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
216    T* operator=(T*) volatile noexcept;
217    T* operator=(T*) noexcept;
218
219    T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
220    T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
221    bool compare_exchange_weak(T*& expc, T* desr,
222                               memory_order s, memory_order f) volatile noexcept;
223    bool compare_exchange_weak(T*& expc, T* desr,
224                               memory_order s, memory_order f) noexcept;
225    bool compare_exchange_strong(T*& expc, T* desr,
226                                 memory_order s, memory_order f) volatile noexcept;
227    bool compare_exchange_strong(T*& expc, T* desr,
228                                 memory_order s, memory_order f) noexcept;
229    bool compare_exchange_weak(T*& expc, T* desr,
230                               memory_order m = memory_order_seq_cst) volatile noexcept;
231    bool compare_exchange_weak(T*& expc, T* desr,
232                               memory_order m = memory_order_seq_cst) noexcept;
233    bool compare_exchange_strong(T*& expc, T* desr,
234                                memory_order m = memory_order_seq_cst) volatile noexcept;
235    bool compare_exchange_strong(T*& expc, T* desr,
236                                 memory_order m = memory_order_seq_cst) noexcept;
237    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
238    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
239    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
240    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
241
242    T* operator++(int) volatile noexcept;
243    T* operator++(int) noexcept;
244    T* operator--(int) volatile noexcept;
245    T* operator--(int) noexcept;
246    T* operator++() volatile noexcept;
247    T* operator++() noexcept;
248    T* operator--() volatile noexcept;
249    T* operator--() noexcept;
250    T* operator+=(ptrdiff_t op) volatile noexcept;
251    T* operator+=(ptrdiff_t op) noexcept;
252    T* operator-=(ptrdiff_t op) volatile noexcept;
253    T* operator-=(ptrdiff_t op) noexcept;
254
255    void wait(T*, memory_order = memory_order::seq_cst) const volatile noexcept;
256    void wait(T*, memory_order = memory_order::seq_cst) const noexcept;
257    void notify_one() volatile noexcept;
258    void notify_one() noexcept;
259    void notify_all() volatile noexcept;
260    void notify_all() noexcept;
261};
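
// Illustrative sketch (not part of the synopsis): fetch_add/fetch_sub on
// atomic<T*> step the pointer in units of T, like ordinary pointer arithmetic:
//
//     int buffer[8] = {};
//     atomic<int*> cursor{buffer};
//     int* chunk = cursor.fetch_add(4);  // returns buffer; cursor is now buffer + 4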
262
263
264template <class T>
265  bool atomic_is_lock_free(const volatile atomic<T>* obj) noexcept;
266
267template <class T>
268  bool atomic_is_lock_free(const atomic<T>* obj) noexcept;
269
270template <class T>
271  void atomic_store(volatile atomic<T>* obj, T desr) noexcept;
272
273template <class T>
274  void atomic_store(atomic<T>* obj, T desr) noexcept;
275
276template <class T>
277  void atomic_store_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
278
279template <class T>
280  void atomic_store_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
281
282template <class T>
283  T atomic_load(const volatile atomic<T>* obj) noexcept;
284
285template <class T>
286  T atomic_load(const atomic<T>* obj) noexcept;
287
288template <class T>
289  T atomic_load_explicit(const volatile atomic<T>* obj, memory_order m) noexcept;
290
291template <class T>
292  T atomic_load_explicit(const atomic<T>* obj, memory_order m) noexcept;
293
294template <class T>
295  T atomic_exchange(volatile atomic<T>* obj, T desr) noexcept;
296
297template <class T>
298  T atomic_exchange(atomic<T>* obj, T desr) noexcept;
299
300template <class T>
301  T atomic_exchange_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
302
303template <class T>
304  T atomic_exchange_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
305
306template <class T>
307  bool atomic_compare_exchange_weak(volatile atomic<T>* obj, T* expc, T desr) noexcept;
308
309template <class T>
310  bool atomic_compare_exchange_weak(atomic<T>* obj, T* expc, T desr) noexcept;
311
312template <class T>
313  bool atomic_compare_exchange_strong(volatile atomic<T>* obj, T* expc, T desr) noexcept;
314
315template <class T>
316  bool atomic_compare_exchange_strong(atomic<T>* obj, T* expc, T desr) noexcept;
317
318template <class T>
319  bool atomic_compare_exchange_weak_explicit(volatile atomic<T>* obj, T* expc,
320                                             T desr,
321                                             memory_order s, memory_order f) noexcept;
322
323template <class T>
324  bool atomic_compare_exchange_weak_explicit(atomic<T>* obj, T* expc, T desr,
325                                             memory_order s, memory_order f) noexcept;
326
327template <class T>
328  bool atomic_compare_exchange_strong_explicit(volatile atomic<T>* obj,
329                                               T* expc, T desr,
330                                               memory_order s, memory_order f) noexcept;
331
332template <class T>
333  bool atomic_compare_exchange_strong_explicit(atomic<T>* obj, T* expc,
334                                               T desr,
335                                               memory_order s, memory_order f) noexcept;
336
337template <class T>
338  void atomic_wait(const volatile atomic<T>* obj, T old) noexcept;
339
340template <class T>
341  void atomic_wait(const atomic<T>* obj, T old) noexcept;
342
343template <class T>
344  void atomic_wait_explicit(const volatile atomic<T>* obj, T old, memory_order m) noexcept;
345
346template <class T>
347  void atomic_wait_explicit(const atomic<T>* obj, T old, memory_order m) noexcept;
348
template <class T>
  void atomic_notify_one(volatile atomic<T>* obj) noexcept;

template <class T>
  void atomic_notify_one(atomic<T>* obj) noexcept;

template <class T>
  void atomic_notify_all(volatile atomic<T>* obj) noexcept;

template <class T>
  void atomic_notify_all(atomic<T>* obj) noexcept;
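
// Illustrative sketch (not part of the synopsis): atomic_wait blocks while the
// observed value equals its second argument; a writer stores a new value and
// then notifies:
//
//     atomic<int> state{0};
//     // waiting thread
//     atomic_wait(&state, 0);      // returns once a value other than 0 is observed
//     // signalling thread
//     state.store(1);
//     atomic_notify_one(&state);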
360
361template <class Integral>
362  Integral atomic_fetch_add(volatile atomic<Integral>* obj, Integral op) noexcept;
363
364template <class Integral>
365  Integral atomic_fetch_add(atomic<Integral>* obj, Integral op) noexcept;
366
367template <class Integral>
368  Integral atomic_fetch_add_explicit(volatile atomic<Integral>* obj, Integral op,
369                              memory_order m) noexcept;
370template <class Integral>
371  Integral atomic_fetch_add_explicit(atomic<Integral>* obj, Integral op,
372                              memory_order m) noexcept;
373template <class Integral>
374  Integral atomic_fetch_sub(volatile atomic<Integral>* obj, Integral op) noexcept;
375
376template <class Integral>
377  Integral atomic_fetch_sub(atomic<Integral>* obj, Integral op) noexcept;
378
379template <class Integral>
380  Integral atomic_fetch_sub_explicit(volatile atomic<Integral>* obj, Integral op,
381                                     memory_order m) noexcept;
382
383template <class Integral>
384  Integral atomic_fetch_sub_explicit(atomic<Integral>* obj, Integral op,
385                                     memory_order m) noexcept;
386
387template <class Integral>
388  Integral atomic_fetch_and(volatile atomic<Integral>* obj, Integral op) noexcept;
389
390template <class Integral>
391  Integral atomic_fetch_and(atomic<Integral>* obj, Integral op) noexcept;
392
393template <class Integral>
394  Integral atomic_fetch_and_explicit(volatile atomic<Integral>* obj, Integral op,
395                                     memory_order m) noexcept;
396
397template <class Integral>
398  Integral atomic_fetch_and_explicit(atomic<Integral>* obj, Integral op,
399                                     memory_order m) noexcept;
400
401template <class Integral>
402  Integral atomic_fetch_or(volatile atomic<Integral>* obj, Integral op) noexcept;
403
404template <class Integral>
405  Integral atomic_fetch_or(atomic<Integral>* obj, Integral op) noexcept;
406
407template <class Integral>
408  Integral atomic_fetch_or_explicit(volatile atomic<Integral>* obj, Integral op,
409                             memory_order m) noexcept;
410
411template <class Integral>
412  Integral atomic_fetch_or_explicit(atomic<Integral>* obj, Integral op,
413                             memory_order m) noexcept;
414
415template <class Integral>
416  Integral atomic_fetch_xor(volatile atomic<Integral>* obj, Integral op) noexcept;
417
418template <class Integral>
419  Integral atomic_fetch_xor(atomic<Integral>* obj, Integral op) noexcept;
420
421template <class Integral>
422  Integral atomic_fetch_xor_explicit(volatile atomic<Integral>* obj, Integral op,
423                                     memory_order m) noexcept;
424
425template <class Integral>
426  Integral atomic_fetch_xor_explicit(atomic<Integral>* obj, Integral op,
427                                     memory_order m) noexcept;
428
429template <class T>
430  T* atomic_fetch_add(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
431
432template <class T>
433  T* atomic_fetch_add(atomic<T*>* obj, ptrdiff_t op) noexcept;
434
435template <class T>
436  T* atomic_fetch_add_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
437                               memory_order m) noexcept;
438
439template <class T>
440  T* atomic_fetch_add_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
441
442template <class T>
443  T* atomic_fetch_sub(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
444
445template <class T>
446  T* atomic_fetch_sub(atomic<T*>* obj, ptrdiff_t op) noexcept;
447
448template <class T>
449  T* atomic_fetch_sub_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
450                               memory_order m) noexcept;
451
452template <class T>
453  T* atomic_fetch_sub_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
454
455// Atomics for standard typedef types
456
457typedef atomic<bool>               atomic_bool;
458typedef atomic<char>               atomic_char;
459typedef atomic<signed char>        atomic_schar;
460typedef atomic<unsigned char>      atomic_uchar;
461typedef atomic<short>              atomic_short;
462typedef atomic<unsigned short>     atomic_ushort;
463typedef atomic<int>                atomic_int;
464typedef atomic<unsigned int>       atomic_uint;
465typedef atomic<long>               atomic_long;
466typedef atomic<unsigned long>      atomic_ulong;
467typedef atomic<long long>          atomic_llong;
468typedef atomic<unsigned long long> atomic_ullong;
469typedef atomic<char8_t>            atomic_char8_t; // C++20
470typedef atomic<char16_t>           atomic_char16_t;
471typedef atomic<char32_t>           atomic_char32_t;
472typedef atomic<wchar_t>            atomic_wchar_t;
473
474typedef atomic<int_least8_t>   atomic_int_least8_t;
475typedef atomic<uint_least8_t>  atomic_uint_least8_t;
476typedef atomic<int_least16_t>  atomic_int_least16_t;
477typedef atomic<uint_least16_t> atomic_uint_least16_t;
478typedef atomic<int_least32_t>  atomic_int_least32_t;
479typedef atomic<uint_least32_t> atomic_uint_least32_t;
480typedef atomic<int_least64_t>  atomic_int_least64_t;
481typedef atomic<uint_least64_t> atomic_uint_least64_t;
482
483typedef atomic<int_fast8_t>   atomic_int_fast8_t;
484typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
485typedef atomic<int_fast16_t>  atomic_int_fast16_t;
486typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
487typedef atomic<int_fast32_t>  atomic_int_fast32_t;
488typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
489typedef atomic<int_fast64_t>  atomic_int_fast64_t;
490typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
491
492typedef atomic<int8_t>   atomic_int8_t;
493typedef atomic<uint8_t>  atomic_uint8_t;
494typedef atomic<int16_t>  atomic_int16_t;
495typedef atomic<uint16_t> atomic_uint16_t;
496typedef atomic<int32_t>  atomic_int32_t;
497typedef atomic<uint32_t> atomic_uint32_t;
498typedef atomic<int64_t>  atomic_int64_t;
499typedef atomic<uint64_t> atomic_uint64_t;
500
501typedef atomic<intptr_t>  atomic_intptr_t;
502typedef atomic<uintptr_t> atomic_uintptr_t;
503typedef atomic<size_t>    atomic_size_t;
504typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
505typedef atomic<intmax_t>  atomic_intmax_t;
506typedef atomic<uintmax_t> atomic_uintmax_t;
507
508// flag type and operations
509
510typedef struct atomic_flag
511{
512    atomic_flag() noexcept = default;
513    atomic_flag(const atomic_flag&) = delete;
514    atomic_flag& operator=(const atomic_flag&) = delete;
515    atomic_flag& operator=(const atomic_flag&) volatile = delete;
516
    bool test(memory_order m = memory_order_seq_cst) const volatile noexcept;
    bool test(memory_order m = memory_order_seq_cst) const noexcept;
519    bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
520    bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
521    void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
522    void clear(memory_order m = memory_order_seq_cst) noexcept;
523
524    void wait(bool, memory_order = memory_order::seq_cst) const volatile noexcept;
525    void wait(bool, memory_order = memory_order::seq_cst) const noexcept;
526    void notify_one() volatile noexcept;
527    void notify_one() noexcept;
528    void notify_all() volatile noexcept;
529    void notify_all() noexcept;
530} atomic_flag;
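
// Illustrative sketch (not part of the synopsis): atomic_flag as a minimal
// spinlock, acquiring with test_and_set and releasing with clear:
//
//     atomic_flag lock = ATOMIC_FLAG_INIT;  // value-initialization also works in C++20
//     while (lock.test_and_set(memory_order_acquire)) { }  // spin until acquired
//     // ... critical section ...
//     lock.clear(memory_order_release);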
531
bool atomic_flag_test(const volatile atomic_flag* obj) noexcept;
bool atomic_flag_test(const atomic_flag* obj) noexcept;
bool atomic_flag_test_explicit(const volatile atomic_flag* obj,
                               memory_order m) noexcept;
bool atomic_flag_test_explicit(const atomic_flag* obj, memory_order m) noexcept;
537bool atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept;
538bool atomic_flag_test_and_set(atomic_flag* obj) noexcept;
539bool atomic_flag_test_and_set_explicit(volatile atomic_flag* obj,
540                                       memory_order m) noexcept;
541bool atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;
542void atomic_flag_clear(volatile atomic_flag* obj) noexcept;
543void atomic_flag_clear(atomic_flag* obj) noexcept;
544void atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;
545void atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;
546
void atomic_flag_wait(const volatile atomic_flag* obj, bool old) noexcept;
void atomic_flag_wait(const atomic_flag* obj, bool old) noexcept;
void atomic_flag_wait_explicit(const volatile atomic_flag* obj, bool old, memory_order m) noexcept;
void atomic_flag_wait_explicit(const atomic_flag* obj, bool old, memory_order m) noexcept;
void atomic_flag_notify_one(volatile atomic_flag* obj) noexcept;
void atomic_flag_notify_one(atomic_flag* obj) noexcept;
void atomic_flag_notify_all(volatile atomic_flag* obj) noexcept;
void atomic_flag_notify_all(atomic_flag* obj) noexcept;
555
556// fences
557
558void atomic_thread_fence(memory_order m) noexcept;
559void atomic_signal_fence(memory_order m) noexcept;
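
// Illustrative sketch (not part of the synopsis): fences paired with relaxed
// atomic operations establish the same synchronizes-with edge as release and
// acquire operations on the atomic object itself:
//
//     int payload = 0;
//     atomic<int> flag{0};
//     // producer
//     payload = 42;                                // non-atomic write
//     atomic_thread_fence(memory_order_release);
//     flag.store(1, memory_order_relaxed);
//     // consumer
//     while (flag.load(memory_order_relaxed) != 1) { }
//     atomic_thread_fence(memory_order_acquire);
//     assert(payload == 42);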
560
561// deprecated
562
563template <class T>
564  void atomic_init(volatile atomic<T>* obj, typename atomic<T>::value_type desr) noexcept;
565
566template <class T>
567  void atomic_init(atomic<T>* obj, typename atomic<T>::value_type desr) noexcept;
568
569#define ATOMIC_VAR_INIT(value) see below
570
571#define ATOMIC_FLAG_INIT see below
572
573}  // std
574
575*/
576
577#include <__config>
578#include <__availability>
579#include <__threading_support>
580#include <cstddef>
581#include <cstdint>
582#include <cstring>
583#include <type_traits>
584#include <version>
585
586#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
587#pragma GCC system_header
588#endif
589
590#ifdef _LIBCPP_HAS_NO_THREADS
# error <atomic> is not supported on this single-threaded system
592#endif
593#ifdef _LIBCPP_HAS_NO_ATOMIC_HEADER
594# error <atomic> is not implemented
595#endif
596#ifdef kill_dependency
597# error C++ standard library is incompatible with <stdatomic.h>
598#endif
599
600#define _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) \
601  _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_consume || \
602                           __m == memory_order_acquire || \
603                           __m == memory_order_acq_rel,   \
604                        "memory order argument to atomic operation is invalid")
605
606#define _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) \
607  _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_release || \
608                           __m == memory_order_acq_rel,   \
609                        "memory order argument to atomic operation is invalid")
610
611#define _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__m, __f) \
612  _LIBCPP_DIAGNOSE_WARNING(__f == memory_order_release || \
613                           __f == memory_order_acq_rel,   \
614                        "memory order argument to atomic operation is invalid")
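
// For illustration, the checks above are intended to flag call sites such as
//   __x.store(1, memory_order_acquire);  // acquire is not a valid store order
//   __x.load(memory_order_release);      // release is not a valid load order
// where the requested order cannot apply to the operation (__x is hypothetical).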
615
616_LIBCPP_BEGIN_NAMESPACE_STD
617
618// Figure out what the underlying type for `memory_order` would be if it were
619// declared as an unscoped enum (accounting for -fshort-enums). Use this result
620// to pin the underlying type in C++20.
621enum __legacy_memory_order {
622    __mo_relaxed,
623    __mo_consume,
624    __mo_acquire,
625    __mo_release,
626    __mo_acq_rel,
627    __mo_seq_cst
628};
629
630typedef underlying_type<__legacy_memory_order>::type __memory_order_underlying_t;
631
632#if _LIBCPP_STD_VER > 17
633
634enum class memory_order : __memory_order_underlying_t {
635  relaxed = __mo_relaxed,
636  consume = __mo_consume,
637  acquire = __mo_acquire,
638  release = __mo_release,
639  acq_rel = __mo_acq_rel,
640  seq_cst = __mo_seq_cst
641};
642
643inline constexpr auto memory_order_relaxed = memory_order::relaxed;
644inline constexpr auto memory_order_consume = memory_order::consume;
645inline constexpr auto memory_order_acquire = memory_order::acquire;
646inline constexpr auto memory_order_release = memory_order::release;
647inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
648inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;
649
650#else
651
652typedef enum memory_order {
653  memory_order_relaxed = __mo_relaxed,
654  memory_order_consume = __mo_consume,
655  memory_order_acquire = __mo_acquire,
656  memory_order_release = __mo_release,
657  memory_order_acq_rel = __mo_acq_rel,
658  memory_order_seq_cst = __mo_seq_cst,
659} memory_order;
660
661#endif // _LIBCPP_STD_VER > 17
662
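// Compares two values of _Tp by their object representation: memcmp over all
// bytes, padding included, with no atomic access involved.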
663template <typename _Tp> _LIBCPP_INLINE_VISIBILITY
664bool __cxx_nonatomic_compare_equal(_Tp const& __lhs, _Tp const& __rhs) {
665    return memcmp(&__lhs, &__rhs, sizeof(_Tp)) == 0;
666}
667
668static_assert((is_same<underlying_type<memory_order>::type, __memory_order_underlying_t>::value),
669  "unexpected underlying type for std::memory_order");
670
#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP) || \
    defined(_LIBCPP_ATOMIC_ONLY_USE_BUILTINS)
673
// [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because the
// defaulted operator= of an object is not volatile-qualified, a byte-by-byte
// copy is required when assigning through a volatile reference.
677template <typename _Tp, typename _Tv> _LIBCPP_INLINE_VISIBILITY
678typename enable_if<is_assignable<_Tp&, _Tv>::value>::type
679__cxx_atomic_assign_volatile(_Tp& __a_value, _Tv const& __val) {
680  __a_value = __val;
681}
682template <typename _Tp, typename _Tv> _LIBCPP_INLINE_VISIBILITY
683typename enable_if<is_assignable<_Tp&, _Tv>::value>::type
684__cxx_atomic_assign_volatile(_Tp volatile& __a_value, _Tv volatile const& __val) {
685  volatile char* __to = reinterpret_cast<volatile char*>(&__a_value);
686  volatile char* __end = __to + sizeof(_Tp);
687  volatile const char* __from = reinterpret_cast<volatile const char*>(&__val);
688  while (__to != __end)
689    *__to++ = *__from++;
690}
691
692#endif
693
694#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)
695
696template <typename _Tp>
697struct __cxx_atomic_base_impl {
698
699  _LIBCPP_INLINE_VISIBILITY
700#ifndef _LIBCPP_CXX03_LANG
701    __cxx_atomic_base_impl() _NOEXCEPT = default;
702#else
703    __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {}
704#endif // _LIBCPP_CXX03_LANG
705  _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp value) _NOEXCEPT
706    : __a_value(value) {}
707  _Tp __a_value;
708};
709
710_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR int __to_gcc_order(memory_order __order) {
711  // Avoid switch statement to make this a constexpr.
712  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
713         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
714          (__order == memory_order_release ? __ATOMIC_RELEASE:
715           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
716            (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL:
717              __ATOMIC_CONSUME))));
718}
719
720_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR int __to_gcc_failure_order(memory_order __order) {
721  // Avoid switch statement to make this a constexpr.
722  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
723         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
724          (__order == memory_order_release ? __ATOMIC_RELAXED:
725           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
726            (__order == memory_order_acq_rel ? __ATOMIC_ACQUIRE:
727              __ATOMIC_CONSUME))));
728}
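
// For illustration, __to_gcc_failure_order maps orders that are invalid for a
// failed compare-exchange down to a pure load order:
//   __to_gcc_failure_order(memory_order_release) == __ATOMIC_RELAXED
//   __to_gcc_failure_order(memory_order_acq_rel) == __ATOMIC_ACQUIRE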
729
730template <typename _Tp>
731_LIBCPP_INLINE_VISIBILITY
732void __cxx_atomic_init(volatile __cxx_atomic_base_impl<_Tp>* __a,  _Tp __val) {
733  __cxx_atomic_assign_volatile(__a->__a_value, __val);
734}
735
736template <typename _Tp>
737_LIBCPP_INLINE_VISIBILITY
738void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp>* __a,  _Tp __val) {
739  __a->__a_value = __val;
740}
741
742_LIBCPP_INLINE_VISIBILITY inline
743void __cxx_atomic_thread_fence(memory_order __order) {
744  __atomic_thread_fence(__to_gcc_order(__order));
745}
746
747_LIBCPP_INLINE_VISIBILITY inline
748void __cxx_atomic_signal_fence(memory_order __order) {
749  __atomic_signal_fence(__to_gcc_order(__order));
750}
751
752template <typename _Tp>
753_LIBCPP_INLINE_VISIBILITY
754void __cxx_atomic_store(volatile __cxx_atomic_base_impl<_Tp>* __a,  _Tp __val,
755                        memory_order __order) {
756  __atomic_store(&__a->__a_value, &__val,
757                 __to_gcc_order(__order));
758}
759
760template <typename _Tp>
761_LIBCPP_INLINE_VISIBILITY
762void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp>* __a,  _Tp __val,
763                        memory_order __order) {
764  __atomic_store(&__a->__a_value, &__val,
765                 __to_gcc_order(__order));
766}
767
768template <typename _Tp>
769_LIBCPP_INLINE_VISIBILITY
770_Tp __cxx_atomic_load(const volatile __cxx_atomic_base_impl<_Tp>* __a,
771                      memory_order __order) {
772  _Tp __ret;
773  __atomic_load(&__a->__a_value, &__ret,
774                __to_gcc_order(__order));
775  return __ret;
776}
777
778template <typename _Tp>
779_LIBCPP_INLINE_VISIBILITY
780_Tp __cxx_atomic_load(const __cxx_atomic_base_impl<_Tp>* __a, memory_order __order) {
781  _Tp __ret;
782  __atomic_load(&__a->__a_value, &__ret,
783                __to_gcc_order(__order));
784  return __ret;
785}
786
787template <typename _Tp>
788_LIBCPP_INLINE_VISIBILITY
789_Tp __cxx_atomic_exchange(volatile __cxx_atomic_base_impl<_Tp>* __a,
790                          _Tp __value, memory_order __order) {
791  _Tp __ret;
792  __atomic_exchange(&__a->__a_value, &__value, &__ret,
793                    __to_gcc_order(__order));
794  return __ret;
795}
796
797template <typename _Tp>
798_LIBCPP_INLINE_VISIBILITY
799_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp>* __a, _Tp __value,
800                          memory_order __order) {
801  _Tp __ret;
802  __atomic_exchange(&__a->__a_value, &__value, &__ret,
803                    __to_gcc_order(__order));
804  return __ret;
805}
806
807template <typename _Tp>
808_LIBCPP_INLINE_VISIBILITY
809bool __cxx_atomic_compare_exchange_strong(
810    volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value,
811    memory_order __success, memory_order __failure) {
812  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
813                                   false,
814                                   __to_gcc_order(__success),
815                                   __to_gcc_failure_order(__failure));
816}
817
818template <typename _Tp>
819_LIBCPP_INLINE_VISIBILITY
820bool __cxx_atomic_compare_exchange_strong(
821    __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success,
822    memory_order __failure) {
823  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
824                                   false,
825                                   __to_gcc_order(__success),
826                                   __to_gcc_failure_order(__failure));
827}
828
829template <typename _Tp>
830_LIBCPP_INLINE_VISIBILITY
831bool __cxx_atomic_compare_exchange_weak(
832    volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value,
833    memory_order __success, memory_order __failure) {
834  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
835                                   true,
836                                   __to_gcc_order(__success),
837                                   __to_gcc_failure_order(__failure));
838}
839
840template <typename _Tp>
841_LIBCPP_INLINE_VISIBILITY
842bool __cxx_atomic_compare_exchange_weak(
843    __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success,
844    memory_order __failure) {
845  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
846                                   true,
847                                   __to_gcc_order(__success),
848                                   __to_gcc_failure_order(__failure));
849}
850
851template <typename _Tp>
852struct __skip_amt { enum {value = 1}; };
853
854template <typename _Tp>
855struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; };
856
857// FIXME: Haven't figured out what the spec says about using arrays with
858// atomic_fetch_add. Force a failure rather than creating bad behavior.
859template <typename _Tp>
860struct __skip_amt<_Tp[]> { };
861template <typename _Tp, int n>
862struct __skip_amt<_Tp[n]> { };
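
// For illustration, __skip_amt scales the delta so that the fetch_add/fetch_sub
// helpers below step pointers in whole objects: __skip_amt<int*>::value is
// sizeof(int), while __skip_amt<long>::value is 1, so a delta of 2 on an atomic
// int* advances the stored pointer by 2 * sizeof(int) bytes.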
863
864template <typename _Tp, typename _Td>
865_LIBCPP_INLINE_VISIBILITY
866_Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_base_impl<_Tp>* __a,
867                           _Td __delta, memory_order __order) {
868  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
869                            __to_gcc_order(__order));
870}
871
872template <typename _Tp, typename _Td>
873_LIBCPP_INLINE_VISIBILITY
874_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta,
875                           memory_order __order) {
876  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
877                            __to_gcc_order(__order));
878}
879
880template <typename _Tp, typename _Td>
881_LIBCPP_INLINE_VISIBILITY
882_Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_base_impl<_Tp>* __a,
883                           _Td __delta, memory_order __order) {
884  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
885                            __to_gcc_order(__order));
886}
887
888template <typename _Tp, typename _Td>
889_LIBCPP_INLINE_VISIBILITY
890_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta,
891                           memory_order __order) {
892  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
893                            __to_gcc_order(__order));
894}
895
896template <typename _Tp>
897_LIBCPP_INLINE_VISIBILITY
898_Tp __cxx_atomic_fetch_and(volatile __cxx_atomic_base_impl<_Tp>* __a,
899                           _Tp __pattern, memory_order __order) {
900  return __atomic_fetch_and(&__a->__a_value, __pattern,
901                            __to_gcc_order(__order));
902}
903
904template <typename _Tp>
905_LIBCPP_INLINE_VISIBILITY
906_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp>* __a,
907                           _Tp __pattern, memory_order __order) {
908  return __atomic_fetch_and(&__a->__a_value, __pattern,
909                            __to_gcc_order(__order));
910}
911
912template <typename _Tp>
913_LIBCPP_INLINE_VISIBILITY
914_Tp __cxx_atomic_fetch_or(volatile __cxx_atomic_base_impl<_Tp>* __a,
915                          _Tp __pattern, memory_order __order) {
916  return __atomic_fetch_or(&__a->__a_value, __pattern,
917                           __to_gcc_order(__order));
918}
919
920template <typename _Tp>
921_LIBCPP_INLINE_VISIBILITY
922_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
923                          memory_order __order) {
924  return __atomic_fetch_or(&__a->__a_value, __pattern,
925                           __to_gcc_order(__order));
926}
927
928template <typename _Tp>
929_LIBCPP_INLINE_VISIBILITY
930_Tp __cxx_atomic_fetch_xor(volatile __cxx_atomic_base_impl<_Tp>* __a,
931                           _Tp __pattern, memory_order __order) {
932  return __atomic_fetch_xor(&__a->__a_value, __pattern,
933                            __to_gcc_order(__order));
934}
935
936template <typename _Tp>
937_LIBCPP_INLINE_VISIBILITY
938_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
939                           memory_order __order) {
940  return __atomic_fetch_xor(&__a->__a_value, __pattern,
941                            __to_gcc_order(__order));
942}
943
944#define __cxx_atomic_is_lock_free(__s) __atomic_is_lock_free(__s, 0)
945
946#elif defined(_LIBCPP_HAS_C_ATOMIC_IMP)
947
948template <typename _Tp>
949struct __cxx_atomic_base_impl {
950
951  _LIBCPP_INLINE_VISIBILITY
952#ifndef _LIBCPP_CXX03_LANG
953    __cxx_atomic_base_impl() _NOEXCEPT = default;
954#else
955    __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {}
956#endif // _LIBCPP_CXX03_LANG
957  _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp value) _NOEXCEPT
958    : __a_value(value) {}
959  _LIBCPP_DISABLE_EXTENSION_WARNING _Atomic(_Tp) __a_value;
960};
961
962#define __cxx_atomic_is_lock_free(__s) __c11_atomic_is_lock_free(__s)
963
964_LIBCPP_INLINE_VISIBILITY inline
965void __cxx_atomic_thread_fence(memory_order __order) _NOEXCEPT {
966    __c11_atomic_thread_fence(static_cast<__memory_order_underlying_t>(__order));
967}
968
969_LIBCPP_INLINE_VISIBILITY inline
970void __cxx_atomic_signal_fence(memory_order __order) _NOEXCEPT {
971    __c11_atomic_signal_fence(static_cast<__memory_order_underlying_t>(__order));
972}
973
974template<class _Tp>
975_LIBCPP_INLINE_VISIBILITY
976void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val) _NOEXCEPT {
977    __c11_atomic_init(&__a->__a_value, __val);
978}
979template<class _Tp>
980_LIBCPP_INLINE_VISIBILITY
981void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val) _NOEXCEPT {
982    __c11_atomic_init(&__a->__a_value, __val);
983}
984
985template<class _Tp>
986_LIBCPP_INLINE_VISIBILITY
987void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val, memory_order __order) _NOEXCEPT {
988    __c11_atomic_store(&__a->__a_value, __val, static_cast<__memory_order_underlying_t>(__order));
989}
990template<class _Tp>
991_LIBCPP_INLINE_VISIBILITY
992void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val, memory_order __order) _NOEXCEPT {
993    __c11_atomic_store(&__a->__a_value, __val, static_cast<__memory_order_underlying_t>(__order));
994}
995
996template<class _Tp>
997_LIBCPP_INLINE_VISIBILITY
998_Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const volatile* __a, memory_order __order) _NOEXCEPT {
999    using __ptr_type = typename remove_const<decltype(__a->__a_value)>::type*;
1000    return __c11_atomic_load(const_cast<__ptr_type>(&__a->__a_value), static_cast<__memory_order_underlying_t>(__order));
1001}
1002template<class _Tp>
1003_LIBCPP_INLINE_VISIBILITY
1004_Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const* __a, memory_order __order) _NOEXCEPT {
1005    using __ptr_type = typename remove_const<decltype(__a->__a_value)>::type*;
1006    return __c11_atomic_load(const_cast<__ptr_type>(&__a->__a_value), static_cast<__memory_order_underlying_t>(__order));
1007}
1008
1009template<class _Tp>
1010_LIBCPP_INLINE_VISIBILITY
1011_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __value, memory_order __order) _NOEXCEPT {
1012    return __c11_atomic_exchange(&__a->__a_value, __value, static_cast<__memory_order_underlying_t>(__order));
1013}
1014template<class _Tp>
1015_LIBCPP_INLINE_VISIBILITY
1016_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> * __a, _Tp __value, memory_order __order) _NOEXCEPT {
1017    return __c11_atomic_exchange(&__a->__a_value, __value, static_cast<__memory_order_underlying_t>(__order));
1018}
1019
1020template<class _Tp>
1021_LIBCPP_INLINE_VISIBILITY
1022bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
1023    return __c11_atomic_compare_exchange_strong(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__failure));
1024}
1025template<class _Tp>
1026_LIBCPP_INLINE_VISIBILITY
1027bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
1028    return __c11_atomic_compare_exchange_strong(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__failure));
1029}
1030
1031template<class _Tp>
1032_LIBCPP_INLINE_VISIBILITY
1033bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
1034    return __c11_atomic_compare_exchange_weak(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__failure));
1035}
1036template<class _Tp>
1037_LIBCPP_INLINE_VISIBILITY
1038bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
1039    return __c11_atomic_compare_exchange_weak(&__a->__a_value, __expected, __value,  static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__failure));
1040}
1041
1042template<class _Tp>
1043_LIBCPP_INLINE_VISIBILITY
1044_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
1045    return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1046}
1047template<class _Tp>
1048_LIBCPP_INLINE_VISIBILITY
1049_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT {
1050    return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1051}
1052
1053template<class _Tp>
1054_LIBCPP_INLINE_VISIBILITY
1055_Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
1056    return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1057}
1058template<class _Tp>
1059_LIBCPP_INLINE_VISIBILITY
1060_Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
1061    return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1062}
1063
1064template<class _Tp>
1065_LIBCPP_INLINE_VISIBILITY
1066_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
1067    return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1068}
1069template<class _Tp>
1070_LIBCPP_INLINE_VISIBILITY
1071_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT {
1072    return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1073}
1074template<class _Tp>
1075_LIBCPP_INLINE_VISIBILITY
1076_Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
1077    return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1078}
1079template<class _Tp>
1080_LIBCPP_INLINE_VISIBILITY
1081_Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
1082    return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1083}
1084
1085template<class _Tp>
1086_LIBCPP_INLINE_VISIBILITY
1087_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1088    return __c11_atomic_fetch_and(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1089}
1090template<class _Tp>
1091_LIBCPP_INLINE_VISIBILITY
1092_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1093    return __c11_atomic_fetch_and(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1094}
1095
1096template<class _Tp>
1097_LIBCPP_INLINE_VISIBILITY
1098_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1099    return __c11_atomic_fetch_or(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1100}
1101template<class _Tp>
1102_LIBCPP_INLINE_VISIBILITY
1103_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1104    return __c11_atomic_fetch_or(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1105}
1106
1107template<class _Tp>
1108_LIBCPP_INLINE_VISIBILITY
1109_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1110    return __c11_atomic_fetch_xor(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1111}
1112template<class _Tp>
1113_LIBCPP_INLINE_VISIBILITY
1114_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1115    return __c11_atomic_fetch_xor(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1116}
1117
1118#endif // _LIBCPP_HAS_GCC_ATOMIC_IMP, _LIBCPP_HAS_C_ATOMIC_IMP
1119
1120template <class _Tp>
1121_LIBCPP_INLINE_VISIBILITY
1122_Tp kill_dependency(_Tp __y) _NOEXCEPT
1123{
1124    return __y;
1125}
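
// Illustrative sketch: kill_dependency returns its argument while ending the
// dependency chain that memory_order_consume would otherwise propagate, e.g.
// (with hypothetical variables __ptr, __p and __i):
//   int* __p = __ptr.load(memory_order_consume);
//   int  __i = kill_dependency(*__p);  // later uses of __i carry no dependency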
1126
1127#if defined(__CLANG_ATOMIC_BOOL_LOCK_FREE)
1128# define ATOMIC_BOOL_LOCK_FREE      __CLANG_ATOMIC_BOOL_LOCK_FREE
1129# define ATOMIC_CHAR_LOCK_FREE      __CLANG_ATOMIC_CHAR_LOCK_FREE
1130#ifndef _LIBCPP_NO_HAS_CHAR8_T
1131# define ATOMIC_CHAR8_T_LOCK_FREE   __CLANG_ATOMIC_CHAR8_T_LOCK_FREE
1132#endif
1133# define ATOMIC_CHAR16_T_LOCK_FREE  __CLANG_ATOMIC_CHAR16_T_LOCK_FREE
1134# define ATOMIC_CHAR32_T_LOCK_FREE  __CLANG_ATOMIC_CHAR32_T_LOCK_FREE
1135# define ATOMIC_WCHAR_T_LOCK_FREE   __CLANG_ATOMIC_WCHAR_T_LOCK_FREE
1136# define ATOMIC_SHORT_LOCK_FREE     __CLANG_ATOMIC_SHORT_LOCK_FREE
1137# define ATOMIC_INT_LOCK_FREE       __CLANG_ATOMIC_INT_LOCK_FREE
1138# define ATOMIC_LONG_LOCK_FREE      __CLANG_ATOMIC_LONG_LOCK_FREE
1139# define ATOMIC_LLONG_LOCK_FREE     __CLANG_ATOMIC_LLONG_LOCK_FREE
1140# define ATOMIC_POINTER_LOCK_FREE   __CLANG_ATOMIC_POINTER_LOCK_FREE
1141#elif defined(__GCC_ATOMIC_BOOL_LOCK_FREE)
1142# define ATOMIC_BOOL_LOCK_FREE      __GCC_ATOMIC_BOOL_LOCK_FREE
1143# define ATOMIC_CHAR_LOCK_FREE      __GCC_ATOMIC_CHAR_LOCK_FREE
1144#ifndef _LIBCPP_NO_HAS_CHAR8_T
1145# define ATOMIC_CHAR8_T_LOCK_FREE   __GCC_ATOMIC_CHAR8_T_LOCK_FREE
1146#endif
1147# define ATOMIC_CHAR16_T_LOCK_FREE  __GCC_ATOMIC_CHAR16_T_LOCK_FREE
1148# define ATOMIC_CHAR32_T_LOCK_FREE  __GCC_ATOMIC_CHAR32_T_LOCK_FREE
1149# define ATOMIC_WCHAR_T_LOCK_FREE   __GCC_ATOMIC_WCHAR_T_LOCK_FREE
1150# define ATOMIC_SHORT_LOCK_FREE     __GCC_ATOMIC_SHORT_LOCK_FREE
1151# define ATOMIC_INT_LOCK_FREE       __GCC_ATOMIC_INT_LOCK_FREE
1152# define ATOMIC_LONG_LOCK_FREE      __GCC_ATOMIC_LONG_LOCK_FREE
1153# define ATOMIC_LLONG_LOCK_FREE     __GCC_ATOMIC_LLONG_LOCK_FREE
1154# define ATOMIC_POINTER_LOCK_FREE   __GCC_ATOMIC_POINTER_LOCK_FREE
1155#endif
1156
1157#ifdef _LIBCPP_ATOMIC_ONLY_USE_BUILTINS
1158
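// A spinlock-protected fallback representation: __a_lock guards __a_value, and
// every operation below follows the pattern __lock() / access __a_value /
// __unlock().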
1159template<typename _Tp>
1160struct __cxx_atomic_lock_impl {
1161
1162  _LIBCPP_INLINE_VISIBILITY
1163  __cxx_atomic_lock_impl() _NOEXCEPT
1164    : __a_value(), __a_lock(0) {}
1165  _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR explicit
1166  __cxx_atomic_lock_impl(_Tp value) _NOEXCEPT
1167    : __a_value(value), __a_lock(0) {}
1168
1169  _Tp __a_value;
1170  mutable __cxx_atomic_base_impl<_LIBCPP_ATOMIC_FLAG_TYPE> __a_lock;
1171
1172  _LIBCPP_INLINE_VISIBILITY void __lock() const volatile {
1173    while(1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire))
1174        /*spin*/;
1175  }
1176  _LIBCPP_INLINE_VISIBILITY void __lock() const {
1177    while(1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire))
1178        /*spin*/;
1179  }
1180  _LIBCPP_INLINE_VISIBILITY void __unlock() const volatile {
1181    __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release);
1182  }
1183  _LIBCPP_INLINE_VISIBILITY void __unlock() const {
1184    __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release);
1185  }
1186  _LIBCPP_INLINE_VISIBILITY _Tp __read() const volatile {
1187    __lock();
1188    _Tp __old;
1189    __cxx_atomic_assign_volatile(__old, __a_value);
1190    __unlock();
1191    return __old;
1192  }
1193  _LIBCPP_INLINE_VISIBILITY _Tp __read() const {
1194    __lock();
1195    _Tp __old = __a_value;
1196    __unlock();
1197    return __old;
1198  }
1199};
1200
1201template <typename _Tp>
1202_LIBCPP_INLINE_VISIBILITY
1203void __cxx_atomic_init(volatile __cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val) {
1204  __cxx_atomic_assign_volatile(__a->__a_value, __val);
1205}
1206template <typename _Tp>
1207_LIBCPP_INLINE_VISIBILITY
1208void __cxx_atomic_init(__cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val) {
1209  __a->__a_value = __val;
1210}
1211
1212template <typename _Tp>
1213_LIBCPP_INLINE_VISIBILITY
1214void __cxx_atomic_store(volatile __cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val, memory_order) {
1215  __a->__lock();
1216  __cxx_atomic_assign_volatile(__a->__a_value, __val);
1217  __a->__unlock();
1218}
1219template <typename _Tp>
1220_LIBCPP_INLINE_VISIBILITY
1221void __cxx_atomic_store(__cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val, memory_order) {
1222  __a->__lock();
1223  __a->__a_value = __val;
1224  __a->__unlock();
1225}
1226
1227template <typename _Tp>
1228_LIBCPP_INLINE_VISIBILITY
1229_Tp __cxx_atomic_load(const volatile __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
1230  return __a->__read();
1231}
1232template <typename _Tp>
1233_LIBCPP_INLINE_VISIBILITY
1234_Tp __cxx_atomic_load(const __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
1235  return __a->__read();
1236}
1237
1238template <typename _Tp>
1239_LIBCPP_INLINE_VISIBILITY
1240_Tp __cxx_atomic_exchange(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
1241  __a->__lock();
1242  _Tp __old;
1243  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1244  __cxx_atomic_assign_volatile(__a->__a_value, __value);
1245  __a->__unlock();
1246  return __old;
1247}
1248template <typename _Tp>
1249_LIBCPP_INLINE_VISIBILITY
1250_Tp __cxx_atomic_exchange(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
1251  __a->__lock();
1252  _Tp __old = __a->__a_value;
1253  __a->__a_value = __value;
1254  __a->__unlock();
1255  return __old;
1256}
1257
1258template <typename _Tp>
1259_LIBCPP_INLINE_VISIBILITY
1260bool __cxx_atomic_compare_exchange_strong(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1261                                          _Tp* __expected, _Tp __value, memory_order, memory_order) {
1262  _Tp __temp;
1263  __a->__lock();
1264  __cxx_atomic_assign_volatile(__temp, __a->__a_value);
1265  bool __ret = (memcmp(&__temp, __expected, sizeof(_Tp)) == 0);
1266  if(__ret)
1267    __cxx_atomic_assign_volatile(__a->__a_value, __value);
1268  else
1269    __cxx_atomic_assign_volatile(*__expected, __a->__a_value);
1270  __a->__unlock();
1271  return __ret;
1272}
1273template <typename _Tp>
1274_LIBCPP_INLINE_VISIBILITY
1275bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_lock_impl<_Tp>* __a,
1276                                          _Tp* __expected, _Tp __value, memory_order, memory_order) {
1277  __a->__lock();
1278  bool __ret = (memcmp(&__a->__a_value, __expected, sizeof(_Tp)) == 0);
1279  if(__ret)
1280    memcpy(&__a->__a_value, &__value, sizeof(_Tp));
1281  else
1282    memcpy(__expected, &__a->__a_value, sizeof(_Tp));
1283  __a->__unlock();
1284  return __ret;
1285}
1286
1287template <typename _Tp>
1288_LIBCPP_INLINE_VISIBILITY
1289bool __cxx_atomic_compare_exchange_weak(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1290                                        _Tp* __expected, _Tp __value, memory_order, memory_order) {
1291  _Tp __temp;
1292  __a->__lock();
1293  __cxx_atomic_assign_volatile(__temp, __a->__a_value);
1294  bool __ret = (memcmp(&__temp, __expected, sizeof(_Tp)) == 0);
1295  if(__ret)
1296    __cxx_atomic_assign_volatile(__a->__a_value, __value);
1297  else
1298    __cxx_atomic_assign_volatile(*__expected, __a->__a_value);
1299  __a->__unlock();
1300  return __ret;
1301}
1302template <typename _Tp>
1303_LIBCPP_INLINE_VISIBILITY
1304bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_lock_impl<_Tp>* __a,
1305                                        _Tp* __expected, _Tp __value, memory_order, memory_order) {
1306  __a->__lock();
1307  bool __ret = (memcmp(&__a->__a_value, __expected, sizeof(_Tp)) == 0);
1308  if(__ret)
1309    memcpy(&__a->__a_value, &__value, sizeof(_Tp));
1310  else
1311    memcpy(__expected, &__a->__a_value, sizeof(_Tp));
1312  __a->__unlock();
1313  return __ret;
1314}
1315
1316template <typename _Tp, typename _Td>
1317_LIBCPP_INLINE_VISIBILITY
1318_Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1319                           _Td __delta, memory_order) {
1320  __a->__lock();
1321  _Tp __old;
1322  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1323  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old + __delta));
1324  __a->__unlock();
1325  return __old;
1326}
1327template <typename _Tp, typename _Td>
1328_LIBCPP_INLINE_VISIBILITY
1329_Tp __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp>* __a,
1330                           _Td __delta, memory_order) {
1331  __a->__lock();
1332  _Tp __old = __a->__a_value;
1333  __a->__a_value += __delta;
1334  __a->__unlock();
1335  return __old;
1336}
1337
1338template <typename _Tp, typename _Td>
1339_LIBCPP_INLINE_VISIBILITY
1340_Tp* __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp*>* __a,
1341                           ptrdiff_t __delta, memory_order) {
1342  __a->__lock();
1343  _Tp* __old;
1344  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1345  __cxx_atomic_assign_volatile(__a->__a_value, __old + __delta);
1346  __a->__unlock();
1347  return __old;
1348}
1349template <typename _Tp, typename _Td>
1350_LIBCPP_INLINE_VISIBILITY
1351_Tp* __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp*>* __a,
1352                           ptrdiff_t __delta, memory_order) {
1353  __a->__lock();
1354  _Tp* __old = __a->__a_value;
1355  __a->__a_value += __delta;
1356  __a->__unlock();
1357  return __old;
1358}
1359
1360template <typename _Tp, typename _Td>
1361_LIBCPP_INLINE_VISIBILITY
1362_Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1363                           _Td __delta, memory_order) {
1364  __a->__lock();
1365  _Tp __old;
1366  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1367  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old - __delta));
1368  __a->__unlock();
1369  return __old;
1370}
1371template <typename _Tp, typename _Td>
1372_LIBCPP_INLINE_VISIBILITY
1373_Tp __cxx_atomic_fetch_sub(__cxx_atomic_lock_impl<_Tp>* __a,
1374                           _Td __delta, memory_order) {
1375  __a->__lock();
1376  _Tp __old = __a->__a_value;
1377  __a->__a_value -= __delta;
1378  __a->__unlock();
1379  return __old;
1380}
1381
1382template <typename _Tp>
1383_LIBCPP_INLINE_VISIBILITY
1384_Tp __cxx_atomic_fetch_and(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1385                           _Tp __pattern, memory_order) {
1386  __a->__lock();
1387  _Tp __old;
1388  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1389  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old & __pattern));
1390  __a->__unlock();
1391  return __old;
1392}
1393template <typename _Tp>
1394_LIBCPP_INLINE_VISIBILITY
1395_Tp __cxx_atomic_fetch_and(__cxx_atomic_lock_impl<_Tp>* __a,
1396                           _Tp __pattern, memory_order) {
1397  __a->__lock();
1398  _Tp __old = __a->__a_value;
1399  __a->__a_value &= __pattern;
1400  __a->__unlock();
1401  return __old;
1402}
1403
1404template <typename _Tp>
1405_LIBCPP_INLINE_VISIBILITY
1406_Tp __cxx_atomic_fetch_or(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1407                          _Tp __pattern, memory_order) {
1408  __a->__lock();
1409  _Tp __old;
1410  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1411  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old | __pattern));
1412  __a->__unlock();
1413  return __old;
1414}
1415template <typename _Tp>
1416_LIBCPP_INLINE_VISIBILITY
1417_Tp __cxx_atomic_fetch_or(__cxx_atomic_lock_impl<_Tp>* __a,
1418                          _Tp __pattern, memory_order) {
1419  __a->__lock();
1420  _Tp __old = __a->__a_value;
1421  __a->__a_value |= __pattern;
1422  __a->__unlock();
1423  return __old;
1424}
1425
1426template <typename _Tp>
1427_LIBCPP_INLINE_VISIBILITY
1428_Tp __cxx_atomic_fetch_xor(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1429                           _Tp __pattern, memory_order) {
1430  __a->__lock();
1431  _Tp __old;
1432  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1433  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old ^ __pattern));
1434  __a->__unlock();
1435  return __old;
1436}
1437template <typename _Tp>
1438_LIBCPP_INLINE_VISIBILITY
1439_Tp __cxx_atomic_fetch_xor(__cxx_atomic_lock_impl<_Tp>* __a,
1440                           _Tp __pattern, memory_order) {
1441  __a->__lock();
1442  _Tp __old = __a->__a_value;
1443  __a->__a_value ^= __pattern;
1444  __a->__unlock();
1445  return __old;
1446}
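
// Illustrative sketch (hypothetical user code, not part of this header): when _Tp
// is too large to be lock-free, std::atomic<_Tp> ends up on the lock-based path
// above; operations remain atomic, just not lock-free. `Big` and `main` are
// invented names for the example.
/*
    #include <atomic>
    #include <cstdio>

    struct Big { long data[8]; };

    int main() {
        std::atomic<Big> a(Big{});
        std::printf("lock-free: %d\n", a.is_lock_free() ? 1 : 0);   // typically 0
        Big expected = a.load();
        Big desired  = expected;
        desired.data[0] = 42;
        a.compare_exchange_strong(expected, desired);               // still atomic
        return 0;
    }
*/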
1447
1448#ifdef __cpp_lib_atomic_is_always_lock_free
1449
1450template<typename _Tp> struct __cxx_is_always_lock_free {
1451    enum { __value = __atomic_always_lock_free(sizeof(_Tp), 0) }; };
1452
1453#else
1454
1455template<typename _Tp> struct __cxx_is_always_lock_free { enum { __value = false }; };
1456// Implementations must match the C ATOMIC_*_LOCK_FREE macro values.
1457template<> struct __cxx_is_always_lock_free<bool> { enum { __value = 2 == ATOMIC_BOOL_LOCK_FREE }; };
1458template<> struct __cxx_is_always_lock_free<char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
1459template<> struct __cxx_is_always_lock_free<signed char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
1460template<> struct __cxx_is_always_lock_free<unsigned char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
1461#ifndef _LIBCPP_NO_HAS_CHAR8_T
1462template<> struct __cxx_is_always_lock_free<char8_t> { enum { __value = 2 == ATOMIC_CHAR8_T_LOCK_FREE }; };
1463#endif
1464template<> struct __cxx_is_always_lock_free<char16_t> { enum { __value = 2 == ATOMIC_CHAR16_T_LOCK_FREE }; };
1465template<> struct __cxx_is_always_lock_free<char32_t> { enum { __value = 2 == ATOMIC_CHAR32_T_LOCK_FREE }; };
1466template<> struct __cxx_is_always_lock_free<wchar_t> { enum { __value = 2 == ATOMIC_WCHAR_T_LOCK_FREE }; };
1467template<> struct __cxx_is_always_lock_free<short> { enum { __value = 2 == ATOMIC_SHORT_LOCK_FREE }; };
1468template<> struct __cxx_is_always_lock_free<unsigned short> { enum { __value = 2 == ATOMIC_SHORT_LOCK_FREE }; };
1469template<> struct __cxx_is_always_lock_free<int> { enum { __value = 2 == ATOMIC_INT_LOCK_FREE }; };
1470template<> struct __cxx_is_always_lock_free<unsigned int> { enum { __value = 2 == ATOMIC_INT_LOCK_FREE }; };
1471template<> struct __cxx_is_always_lock_free<long> { enum { __value = 2 == ATOMIC_LONG_LOCK_FREE }; };
1472template<> struct __cxx_is_always_lock_free<unsigned long> { enum { __value = 2 == ATOMIC_LONG_LOCK_FREE }; };
1473template<> struct __cxx_is_always_lock_free<long long> { enum { __value = 2 == ATOMIC_LLONG_LOCK_FREE }; };
1474template<> struct __cxx_is_always_lock_free<unsigned long long> { enum { __value = 2 == ATOMIC_LLONG_LOCK_FREE }; };
1475template<typename _Tp> struct __cxx_is_always_lock_free<_Tp*> { enum { __value = 2 == ATOMIC_POINTER_LOCK_FREE }; };
1476template<> struct __cxx_is_always_lock_free<std::nullptr_t> { enum { __value = 2 == ATOMIC_POINTER_LOCK_FREE }; };
1477
1478#endif //__cpp_lib_atomic_is_always_lock_free
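
// Illustrative sketch (hypothetical user code; is_always_lock_free assumes C++17):
// the compile-time and run-time lock-freedom queries this machinery feeds.
/*
    #include <atomic>
    #include <cstdio>

    int main() {
        std::printf("atomic<int> always lock-free: %d\n",
                    std::atomic<int>::is_always_lock_free ? 1 : 0);
        std::atomic<long long> x(0);
        std::printf("this atomic<long long> lock-free now: %d\n",
                    x.is_lock_free() ? 1 : 0);
        return 0;
    }
*/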
1479
1480template <typename _Tp,
1481          typename _Base = typename conditional<__cxx_is_always_lock_free<_Tp>::__value,
1482                                                __cxx_atomic_base_impl<_Tp>,
1483                                                __cxx_atomic_lock_impl<_Tp> >::type>
1484#else
1485template <typename _Tp,
1486          typename _Base = __cxx_atomic_base_impl<_Tp> >
1487#endif //_LIBCPP_ATOMIC_ONLY_USE_BUILTINS
1488struct __cxx_atomic_impl : public _Base {
1489
1490#if _GNUC_VER >= 501
1491    static_assert(is_trivially_copyable<_Tp>::value,
1492      "std::atomic<Tp> requires that 'Tp' be a trivially copyable type");
1493#endif
1494
1495  _LIBCPP_INLINE_VISIBILITY __cxx_atomic_impl() _NOEXCEPT _LIBCPP_DEFAULT
1496  _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR explicit __cxx_atomic_impl(_Tp value) _NOEXCEPT
1497    : _Base(value) {}
1498};
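
// Illustrative sketch of the static_assert above (hypothetical, not part of the
// header): atomic<_Tp> requires a trivially copyable _Tp.
/*
    #include <string>
    #include <type_traits>

    static_assert(std::is_trivially_copyable<int>::value,
                  "int is fine for std::atomic<int>");
    static_assert(!std::is_trivially_copyable<std::string>::value,
                  "std::atomic<std::string> would be rejected by the assertion above");
*/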
1499
1500#ifdef __linux__
1501    using __cxx_contention_t = int32_t;
1502#else
1503    using __cxx_contention_t = int64_t;
1504#endif //__linux__
1505
1506#if _LIBCPP_STD_VER >= 11
1507
1508using __cxx_atomic_contention_t = __cxx_atomic_impl<__cxx_contention_t>;
1509
1510#ifndef _LIBCPP_HAS_NO_PLATFORM_WAIT
1511
1512_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_one(void const volatile*);
1513_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_all(void const volatile*);
1514_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t __libcpp_atomic_monitor(void const volatile*);
1515_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __libcpp_atomic_wait(void const volatile*, __cxx_contention_t);
1516
1517_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_one(__cxx_atomic_contention_t const volatile*);
1518_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_all(__cxx_atomic_contention_t const volatile*);
1519_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t __libcpp_atomic_monitor(__cxx_atomic_contention_t const volatile*);
1520_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __libcpp_atomic_wait(__cxx_atomic_contention_t const volatile*, __cxx_contention_t);
1521
1522template <class _Atp, class _Fn>
1523struct __libcpp_atomic_wait_backoff_impl {
1524    _Atp* __a;
1525    _Fn __test_fn;
1526    _LIBCPP_AVAILABILITY_SYNC
1527    _LIBCPP_INLINE_VISIBILITY bool operator()(chrono::nanoseconds __elapsed) const
1528    {
1529        if(__elapsed > chrono::microseconds(64))
1530        {
1531            auto const __monitor = __libcpp_atomic_monitor(__a);
1532            if(__test_fn())
1533                return true;
1534            __libcpp_atomic_wait(__a, __monitor);
1535        }
1536        else if(__elapsed > chrono::microseconds(4))
1537            __libcpp_thread_yield();
1538        else
1539            {} // poll
1540        return false;
1541    }
1542};
1543
1544template <class _Atp, class _Fn>
1545_LIBCPP_AVAILABILITY_SYNC
1546_LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp* __a, _Fn && __test_fn)
1547{
1548    __libcpp_atomic_wait_backoff_impl<_Atp, typename decay<_Fn>::type> __backoff_fn = {__a, __test_fn};
1549    return __libcpp_thread_poll_with_backoff(__test_fn, __backoff_fn);
1550}
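
// Minimal free-standing sketch of the same spin/yield/block backoff idea
// (assumptions: `poll_with_backoff` is an invented name, its thresholds mirror
// the constants above, and sleep_for stands in for the platform blocking wait;
// this is not the library's API).
/*
    #include <chrono>
    #include <thread>

    template <class Pred>
    void poll_with_backoff(Pred done) {
        auto start = std::chrono::steady_clock::now();
        while (!done()) {
            auto elapsed = std::chrono::steady_clock::now() - start;
            if (elapsed > std::chrono::microseconds(64))
                std::this_thread::sleep_for(std::chrono::microseconds(64)); // "block"
            else if (elapsed > std::chrono::microseconds(4))
                std::this_thread::yield();
            // else: keep polling
        }
    }
*/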
1551
1552#else // _LIBCPP_HAS_NO_PLATFORM_WAIT
1553
1554template <class _Tp>
1555_LIBCPP_INLINE_VISIBILITY void __cxx_atomic_notify_all(__cxx_atomic_impl<_Tp> const volatile*) { }
1556template <class _Tp>
1557_LIBCPP_INLINE_VISIBILITY void __cxx_atomic_notify_one(__cxx_atomic_impl<_Tp> const volatile*) { }
1558template <class _Atp, class _Fn>
1559_LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp*, _Fn && __test_fn)
1560{
1561    return __libcpp_thread_poll_with_backoff(__test_fn, __libcpp_timed_backoff_policy());
1562}
1563
1564#endif // _LIBCPP_HAS_NO_PLATFORM_WAIT
1565
1566template <class _Atp, class _Tp>
1567struct __cxx_atomic_wait_test_fn_impl {
1568    _Atp* __a;
1569    _Tp __val;
1570    memory_order __order;
1571    _LIBCPP_INLINE_VISIBILITY bool operator()() const
1572    {
1573        return !__cxx_nonatomic_compare_equal(__cxx_atomic_load(__a, __order), __val);
1574    }
1575};
1576
1577template <class _Atp, class _Tp>
1578_LIBCPP_AVAILABILITY_SYNC
1579_LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp* __a, _Tp const __val, memory_order __order)
1580{
1581    __cxx_atomic_wait_test_fn_impl<_Atp, _Tp> __test_fn = {__a, __val, __order};
1582    return __cxx_atomic_wait(__a, __test_fn);
1583}
1584
1585#endif //_LIBCPP_STD_VER >= 11
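
// Illustrative C++20 usage of the wait/notify machinery above (hypothetical user
// code; `flag` and the lambda are invented for the example).
/*
    #include <atomic>
    #include <thread>

    int main() {
        std::atomic<int> flag(0);
        std::thread t([&] {
            flag.store(1, std::memory_order_release);
            flag.notify_one();                    // wake the waiter
        });
        flag.wait(0, std::memory_order_acquire);  // returns once flag != 0
        t.join();
        return 0;
    }
*/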
1586
1587// general atomic<T>
1588
1589template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
1590struct __atomic_base  // false
1591{
1592    mutable __cxx_atomic_impl<_Tp> __a_;
1593
1594#if defined(__cpp_lib_atomic_is_always_lock_free)
1595  static _LIBCPP_CONSTEXPR bool is_always_lock_free = __atomic_always_lock_free(sizeof(__a_), 0);
1596#endif
1597
1598    _LIBCPP_INLINE_VISIBILITY
1599    bool is_lock_free() const volatile _NOEXCEPT
1600        {return __cxx_atomic_is_lock_free(sizeof(_Tp));}
1601    _LIBCPP_INLINE_VISIBILITY
1602    bool is_lock_free() const _NOEXCEPT
1603        {return static_cast<__atomic_base const volatile*>(this)->is_lock_free();}
1604    _LIBCPP_INLINE_VISIBILITY
1605    void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1606      _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
1607        {__cxx_atomic_store(&__a_, __d, __m);}
1608    _LIBCPP_INLINE_VISIBILITY
1609    void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1610      _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
1611        {__cxx_atomic_store(&__a_, __d, __m);}
1612    _LIBCPP_INLINE_VISIBILITY
1613    _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
1614      _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
1615        {return __cxx_atomic_load(&__a_, __m);}
1616    _LIBCPP_INLINE_VISIBILITY
1617    _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
1618      _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
1619        {return __cxx_atomic_load(&__a_, __m);}
1620    _LIBCPP_INLINE_VISIBILITY
1621    operator _Tp() const volatile _NOEXCEPT {return load();}
1622    _LIBCPP_INLINE_VISIBILITY
1623    operator _Tp() const _NOEXCEPT          {return load();}
1624    _LIBCPP_INLINE_VISIBILITY
1625    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1626        {return __cxx_atomic_exchange(&__a_, __d, __m);}
1627    _LIBCPP_INLINE_VISIBILITY
1628    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1629        {return __cxx_atomic_exchange(&__a_, __d, __m);}
1630    _LIBCPP_INLINE_VISIBILITY
1631    bool compare_exchange_weak(_Tp& __e, _Tp __d,
1632                               memory_order __s, memory_order __f) volatile _NOEXCEPT
1633      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
1634        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
1635    _LIBCPP_INLINE_VISIBILITY
1636    bool compare_exchange_weak(_Tp& __e, _Tp __d,
1637                               memory_order __s, memory_order __f) _NOEXCEPT
1638      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
1639        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
1640    _LIBCPP_INLINE_VISIBILITY
1641    bool compare_exchange_strong(_Tp& __e, _Tp __d,
1642                                 memory_order __s, memory_order __f) volatile _NOEXCEPT
1643      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
1644        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
1645    _LIBCPP_INLINE_VISIBILITY
1646    bool compare_exchange_strong(_Tp& __e, _Tp __d,
1647                                 memory_order __s, memory_order __f) _NOEXCEPT
1648      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
1649        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
1650    _LIBCPP_INLINE_VISIBILITY
1651    bool compare_exchange_weak(_Tp& __e, _Tp __d,
1652                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1653        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
1654    _LIBCPP_INLINE_VISIBILITY
1655    bool compare_exchange_weak(_Tp& __e, _Tp __d,
1656                               memory_order __m = memory_order_seq_cst) _NOEXCEPT
1657        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
1658    _LIBCPP_INLINE_VISIBILITY
1659    bool compare_exchange_strong(_Tp& __e, _Tp __d,
1660                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1661        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
1662    _LIBCPP_INLINE_VISIBILITY
1663    bool compare_exchange_strong(_Tp& __e, _Tp __d,
1664                                 memory_order __m = memory_order_seq_cst) _NOEXCEPT
1665        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
1666
1667    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void wait(_Tp __v, memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
1668        {__cxx_atomic_wait(&__a_, __v, __m);}
1669    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void wait(_Tp __v, memory_order __m = memory_order_seq_cst) const _NOEXCEPT
1670        {__cxx_atomic_wait(&__a_, __v, __m);}
1671    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_one() volatile _NOEXCEPT
1672        {__cxx_atomic_notify_one(&__a_);}
1673    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_one() _NOEXCEPT
1674        {__cxx_atomic_notify_one(&__a_);}
1675    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_all() volatile _NOEXCEPT
1676        {__cxx_atomic_notify_all(&__a_);}
1677    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_all() _NOEXCEPT
1678        {__cxx_atomic_notify_all(&__a_);}
1679
1680    _LIBCPP_INLINE_VISIBILITY
1681    __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
1682
1683    _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR
1684    __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}
1685
1686#ifndef _LIBCPP_CXX03_LANG
1687    __atomic_base(const __atomic_base&) = delete;
1688    __atomic_base& operator=(const __atomic_base&) = delete;
1689    __atomic_base& operator=(const __atomic_base&) volatile = delete;
1690#else
1691private:
1692    _LIBCPP_INLINE_VISIBILITY
1693    __atomic_base(const __atomic_base&);
1694    _LIBCPP_INLINE_VISIBILITY
1695    __atomic_base& operator=(const __atomic_base&);
1696    _LIBCPP_INLINE_VISIBILITY
1697    __atomic_base& operator=(const __atomic_base&) volatile;
1698#endif
1699};
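
// Illustrative sketch of the core operations defined above (hypothetical helper
// `fetch_max`, not part of the header): a standard compare-exchange retry loop.
/*
    #include <atomic>

    int fetch_max(std::atomic<int>& a, int candidate) {
        int cur = a.load(std::memory_order_relaxed);
        while (cur < candidate &&
               !a.compare_exchange_weak(cur, candidate,
                                        std::memory_order_release,
                                        std::memory_order_relaxed)) {
            // on failure, cur holds the freshly observed value; retry while smaller
        }
        return cur;
    }
*/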
1700
1701#if defined(__cpp_lib_atomic_is_always_lock_free)
1702template <class _Tp, bool __b>
1703_LIBCPP_CONSTEXPR bool __atomic_base<_Tp, __b>::is_always_lock_free;
1704#endif
1705
1706// atomic<Integral>
1707
1708template <class _Tp>
1709struct __atomic_base<_Tp, true>
1710    : public __atomic_base<_Tp, false>
1711{
1712    typedef __atomic_base<_Tp, false> __base;
1713    _LIBCPP_INLINE_VISIBILITY
1714    __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
1715    _LIBCPP_INLINE_VISIBILITY
1716    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}
1717
1718    _LIBCPP_INLINE_VISIBILITY
1719    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1720        {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
1721    _LIBCPP_INLINE_VISIBILITY
1722    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1723        {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
1724    _LIBCPP_INLINE_VISIBILITY
1725    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1726        {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
1727    _LIBCPP_INLINE_VISIBILITY
1728    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1729        {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
1730    _LIBCPP_INLINE_VISIBILITY
1731    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1732        {return __cxx_atomic_fetch_and(&this->__a_, __op, __m);}
1733    _LIBCPP_INLINE_VISIBILITY
1734    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1735        {return __cxx_atomic_fetch_and(&this->__a_, __op, __m);}
1736    _LIBCPP_INLINE_VISIBILITY
1737    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1738        {return __cxx_atomic_fetch_or(&this->__a_, __op, __m);}
1739    _LIBCPP_INLINE_VISIBILITY
1740    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1741        {return __cxx_atomic_fetch_or(&this->__a_, __op, __m);}
1742    _LIBCPP_INLINE_VISIBILITY
1743    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1744        {return __cxx_atomic_fetch_xor(&this->__a_, __op, __m);}
1745    _LIBCPP_INLINE_VISIBILITY
1746    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1747        {return __cxx_atomic_fetch_xor(&this->__a_, __op, __m);}
1748
1749    _LIBCPP_INLINE_VISIBILITY
1750    _Tp operator++(int) volatile _NOEXCEPT      {return fetch_add(_Tp(1));}
1751    _LIBCPP_INLINE_VISIBILITY
1752    _Tp operator++(int) _NOEXCEPT               {return fetch_add(_Tp(1));}
1753    _LIBCPP_INLINE_VISIBILITY
1754    _Tp operator--(int) volatile _NOEXCEPT      {return fetch_sub(_Tp(1));}
1755    _LIBCPP_INLINE_VISIBILITY
1756    _Tp operator--(int) _NOEXCEPT               {return fetch_sub(_Tp(1));}
1757    _LIBCPP_INLINE_VISIBILITY
1758    _Tp operator++() volatile _NOEXCEPT         {return fetch_add(_Tp(1)) + _Tp(1);}
1759    _LIBCPP_INLINE_VISIBILITY
1760    _Tp operator++() _NOEXCEPT                  {return fetch_add(_Tp(1)) + _Tp(1);}
1761    _LIBCPP_INLINE_VISIBILITY
1762    _Tp operator--() volatile _NOEXCEPT         {return fetch_sub(_Tp(1)) - _Tp(1);}
1763    _LIBCPP_INLINE_VISIBILITY
1764    _Tp operator--() _NOEXCEPT                  {return fetch_sub(_Tp(1)) - _Tp(1);}
1765    _LIBCPP_INLINE_VISIBILITY
1766    _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
1767    _LIBCPP_INLINE_VISIBILITY
1768    _Tp operator+=(_Tp __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
1769    _LIBCPP_INLINE_VISIBILITY
1770    _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
1771    _LIBCPP_INLINE_VISIBILITY
1772    _Tp operator-=(_Tp __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
1773    _LIBCPP_INLINE_VISIBILITY
1774    _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
1775    _LIBCPP_INLINE_VISIBILITY
1776    _Tp operator&=(_Tp __op) _NOEXCEPT          {return fetch_and(__op) & __op;}
1777    _LIBCPP_INLINE_VISIBILITY
1778    _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
1779    _LIBCPP_INLINE_VISIBILITY
1780    _Tp operator|=(_Tp __op) _NOEXCEPT          {return fetch_or(__op) | __op;}
1781    _LIBCPP_INLINE_VISIBILITY
1782    _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
1783    _LIBCPP_INLINE_VISIBILITY
1784    _Tp operator^=(_Tp __op) _NOEXCEPT          {return fetch_xor(__op) ^ __op;}
1785};
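
// Illustrative sketch of the integral operations above (hypothetical names
// `hits`, `record_hit`, `snapshot`): a relaxed event counter.
/*
    #include <atomic>

    std::atomic<unsigned> hits(0);

    void record_hit() { hits.fetch_add(1, std::memory_order_relaxed); } // like ++hits, but relaxed
    unsigned snapshot() { return hits.load(std::memory_order_relaxed); }
*/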
1786
1787// atomic<T>
1788
1789template <class _Tp>
1790struct atomic
1791    : public __atomic_base<_Tp>
1792{
1793    typedef __atomic_base<_Tp> __base;
1794    typedef _Tp value_type;
1795    typedef value_type difference_type;
1796    _LIBCPP_INLINE_VISIBILITY
1797    atomic() _NOEXCEPT _LIBCPP_DEFAULT
1798    _LIBCPP_INLINE_VISIBILITY
1799    _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}
1800
1801    _LIBCPP_INLINE_VISIBILITY
1802    _Tp operator=(_Tp __d) volatile _NOEXCEPT
1803        {__base::store(__d); return __d;}
1804    _LIBCPP_INLINE_VISIBILITY
1805    _Tp operator=(_Tp __d) _NOEXCEPT
1806        {__base::store(__d); return __d;}
1807};
1808
1809// atomic<T*>
1810
1811template <class _Tp>
1812struct atomic<_Tp*>
1813    : public __atomic_base<_Tp*>
1814{
1815    typedef __atomic_base<_Tp*> __base;
1816    typedef _Tp* value_type;
1817    typedef ptrdiff_t difference_type;
1818    _LIBCPP_INLINE_VISIBILITY
1819    atomic() _NOEXCEPT _LIBCPP_DEFAULT
1820    _LIBCPP_INLINE_VISIBILITY
1821    _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}
1822
1823    _LIBCPP_INLINE_VISIBILITY
1824    _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
1825        {__base::store(__d); return __d;}
1826    _LIBCPP_INLINE_VISIBILITY
1827    _Tp* operator=(_Tp* __d) _NOEXCEPT
1828        {__base::store(__d); return __d;}
1829
1830    _LIBCPP_INLINE_VISIBILITY
1831    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
1832                                                                        volatile _NOEXCEPT
1833        {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
1834    _LIBCPP_INLINE_VISIBILITY
1835    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1836        {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
1837    _LIBCPP_INLINE_VISIBILITY
1838    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
1839                                                                        volatile _NOEXCEPT
1840        {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
1841    _LIBCPP_INLINE_VISIBILITY
1842    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1843        {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
1844
1845    _LIBCPP_INLINE_VISIBILITY
1846    _Tp* operator++(int) volatile _NOEXCEPT            {return fetch_add(1);}
1847    _LIBCPP_INLINE_VISIBILITY
1848    _Tp* operator++(int) _NOEXCEPT                     {return fetch_add(1);}
1849    _LIBCPP_INLINE_VISIBILITY
1850    _Tp* operator--(int) volatile _NOEXCEPT            {return fetch_sub(1);}
1851    _LIBCPP_INLINE_VISIBILITY
1852    _Tp* operator--(int) _NOEXCEPT                     {return fetch_sub(1);}
1853    _LIBCPP_INLINE_VISIBILITY
1854    _Tp* operator++() volatile _NOEXCEPT               {return fetch_add(1) + 1;}
1855    _LIBCPP_INLINE_VISIBILITY
1856    _Tp* operator++() _NOEXCEPT                        {return fetch_add(1) + 1;}
1857    _LIBCPP_INLINE_VISIBILITY
1858    _Tp* operator--() volatile _NOEXCEPT               {return fetch_sub(1) - 1;}
1859    _LIBCPP_INLINE_VISIBILITY
1860    _Tp* operator--() _NOEXCEPT                        {return fetch_sub(1) - 1;}
1861    _LIBCPP_INLINE_VISIBILITY
1862    _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
1863    _LIBCPP_INLINE_VISIBILITY
1864    _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
1865    _LIBCPP_INLINE_VISIBILITY
1866    _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
1867    _LIBCPP_INLINE_VISIBILITY
1868    _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
1869};
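
// Illustrative sketch of atomic<_Tp*> arithmetic (hypothetical `claim_slot`, no
// bounds checking): fetch_add advances by whole elements, difference_type being
// ptrdiff_t.
/*
    #include <atomic>

    int buffer[128];
    std::atomic<int*> next(buffer);

    int* claim_slot() { return next.fetch_add(1); }  // returns the slot just claimed
*/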
1870
1871// atomic_is_lock_free
1872
1873template <class _Tp>
1874_LIBCPP_INLINE_VISIBILITY
1875bool
1876atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
1877{
1878    return __o->is_lock_free();
1879}
1880
1881template <class _Tp>
1882_LIBCPP_INLINE_VISIBILITY
1883bool
1884atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
1885{
1886    return __o->is_lock_free();
1887}
1888
1889// atomic_init
1890
1891template <class _Tp>
1892_LIBCPP_INLINE_VISIBILITY
1893void
1894atomic_init(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
1895{
1896    __cxx_atomic_init(&__o->__a_, __d);
1897}
1898
1899template <class _Tp>
1900_LIBCPP_INLINE_VISIBILITY
1901void
1902atomic_init(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
1903{
1904    __cxx_atomic_init(&__o->__a_, __d);
1905}
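
// Illustrative sketch of atomic_init (hypothetical `init_example`); atomic_init
// and ATOMIC_VAR_INIT are deprecated in C++20 in favor of constructing the
// atomic with its value directly.
/*
    #include <atomic>

    void init_example() {
        std::atomic<int> x;        // value is indeterminate before C++20
        std::atomic_init(&x, 5);   // non-atomic initialization to 5
    }
*/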
1906
1907// atomic_store
1908
1909template <class _Tp>
1910_LIBCPP_INLINE_VISIBILITY
1911void
1912atomic_store(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
1913{
1914    __o->store(__d);
1915}
1916
1917template <class _Tp>
1918_LIBCPP_INLINE_VISIBILITY
1919void
1920atomic_store(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
1921{
1922    __o->store(__d);
1923}
1924
1925// atomic_store_explicit
1926
1927template <class _Tp>
1928_LIBCPP_INLINE_VISIBILITY
1929void
1930atomic_store_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
1931  _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
1932{
1933    __o->store(__d, __m);
1934}
1935
1936template <class _Tp>
1937_LIBCPP_INLINE_VISIBILITY
1938void
1939atomic_store_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
1940  _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
1941{
1942    __o->store(__d, __m);
1943}
1944
1945// atomic_load
1946
1947template <class _Tp>
1948_LIBCPP_INLINE_VISIBILITY
1949_Tp
1950atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
1951{
1952    return __o->load();
1953}
1954
1955template <class _Tp>
1956_LIBCPP_INLINE_VISIBILITY
1957_Tp
1958atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
1959{
1960    return __o->load();
1961}
1962
1963// atomic_load_explicit
1964
1965template <class _Tp>
1966_LIBCPP_INLINE_VISIBILITY
1967_Tp
1968atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1969  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
1970{
1971    return __o->load(__m);
1972}
1973
1974template <class _Tp>
1975_LIBCPP_INLINE_VISIBILITY
1976_Tp
1977atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1978  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
1979{
1980    return __o->load(__m);
1981}
1982
1983// atomic_exchange
1984
1985template <class _Tp>
1986_LIBCPP_INLINE_VISIBILITY
1987_Tp
1988atomic_exchange(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
1989{
1990    return __o->exchange(__d);
1991}
1992
1993template <class _Tp>
1994_LIBCPP_INLINE_VISIBILITY
1995_Tp
1996atomic_exchange(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
1997{
1998    return __o->exchange(__d);
1999}
2000
2001// atomic_exchange_explicit
2002
2003template <class _Tp>
2004_LIBCPP_INLINE_VISIBILITY
2005_Tp
2006atomic_exchange_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
2007{
2008    return __o->exchange(__d, __m);
2009}
2010
2011template <class _Tp>
2012_LIBCPP_INLINE_VISIBILITY
2013_Tp
2014atomic_exchange_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
2015{
2016    return __o->exchange(__d, __m);
2017}
2018
2019// atomic_compare_exchange_weak
2020
2021template <class _Tp>
2022_LIBCPP_INLINE_VISIBILITY
2023bool
2024atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
2025{
2026    return __o->compare_exchange_weak(*__e, __d);
2027}
2028
2029template <class _Tp>
2030_LIBCPP_INLINE_VISIBILITY
2031bool
2032atomic_compare_exchange_weak(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
2033{
2034    return __o->compare_exchange_weak(*__e, __d);
2035}
2036
2037// atomic_compare_exchange_strong
2038
2039template <class _Tp>
2040_LIBCPP_INLINE_VISIBILITY
2041bool
2042atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
2043{
2044    return __o->compare_exchange_strong(*__e, __d);
2045}
2046
2047template <class _Tp>
2048_LIBCPP_INLINE_VISIBILITY
2049bool
2050atomic_compare_exchange_strong(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
2051{
2052    return __o->compare_exchange_strong(*__e, __d);
2053}
2054
2055// atomic_compare_exchange_weak_explicit
2056
2057template <class _Tp>
2058_LIBCPP_INLINE_VISIBILITY
2059bool
2060atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e,
2061                                      typename atomic<_Tp>::value_type __d,
2062                                      memory_order __s, memory_order __f) _NOEXCEPT
2063  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
2064{
2065    return __o->compare_exchange_weak(*__e, __d, __s, __f);
2066}
2067
2068template <class _Tp>
2069_LIBCPP_INLINE_VISIBILITY
2070bool
2071atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d,
2072                                      memory_order __s, memory_order __f) _NOEXCEPT
2073  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
2074{
2075    return __o->compare_exchange_weak(*__e, __d, __s, __f);
2076}
2077
2078// atomic_compare_exchange_strong_explicit
2079
2080template <class _Tp>
2081_LIBCPP_INLINE_VISIBILITY
2082bool
2083atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
2084                                        typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d,
2085                                        memory_order __s, memory_order __f) _NOEXCEPT
2086  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
2087{
2088    return __o->compare_exchange_strong(*__e, __d, __s, __f);
2089}
2090
2091template <class _Tp>
2092_LIBCPP_INLINE_VISIBILITY
2093bool
2094atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e,
2095                                        typename atomic<_Tp>::value_type __d,
2096                                        memory_order __s, memory_order __f) _NOEXCEPT
2097  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
2098{
2099    return __o->compare_exchange_strong(*__e, __d, __s, __f);
2100}
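
// Illustrative sketch of the C-compatible free functions above (hypothetical
// `try_claim`): claim a zero-initialized state exactly once.
/*
    #include <atomic>

    bool try_claim(std::atomic<int>* state) {
        int expected = 0;
        return std::atomic_compare_exchange_strong(state, &expected, 1);
    }
*/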
2101
2102// atomic_wait
2103
2104template <class _Tp>
2105_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2106void atomic_wait(const volatile atomic<_Tp>* __o,
2107                 typename atomic<_Tp>::value_type __v) _NOEXCEPT
2108{
2109    return __o->wait(__v);
2110}
2111
2112template <class _Tp>
2113_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2114void atomic_wait(const atomic<_Tp>* __o,
2115                 typename atomic<_Tp>::value_type __v) _NOEXCEPT
2116{
2117    return __o->wait(__v);
2118}
2119
2120// atomic_wait_explicit
2121
2122template <class _Tp>
2123_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2124void atomic_wait_explicit(const volatile atomic<_Tp>* __o,
2125                          typename atomic<_Tp>::value_type __v,
2126                          memory_order __m) _NOEXCEPT
2127  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
2128{
2129    return __o->wait(__v, __m);
2130}
2131
2132template <class _Tp>
2133_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2134void atomic_wait_explicit(const atomic<_Tp>* __o,
2135                          typename atomic<_Tp>::value_type __v,
2136                          memory_order __m) _NOEXCEPT
2137  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
2138{
2139    return __o->wait(__v, __m);
2140}
2141
2142// atomic_notify_one
2143
2144template <class _Tp>
2145_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2146void atomic_notify_one(volatile atomic<_Tp>* __o) _NOEXCEPT
2147{
2148    __o->notify_one();
2149}
2150template <class _Tp>
2151_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2152void atomic_notify_one(atomic<_Tp>* __o) _NOEXCEPT
2153{
2154    __o->notify_one();
2155}
2156
// atomic_notify_all
2158
2159template <class _Tp>
2160_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2161void atomic_notify_all(volatile atomic<_Tp>* __o) _NOEXCEPT
2162{
2163    __o->notify_all();
2164}
2165template <class _Tp>
2166_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2167void atomic_notify_all(atomic<_Tp>* __o) _NOEXCEPT
2168{
2169    __o->notify_all();
2170}
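
// Illustrative C++20 sketch of the free-function wait/notify forms above
// (hypothetical `block_until_nonzero` / `release`).
/*
    #include <atomic>

    void block_until_nonzero(const std::atomic<int>* p) {
        std::atomic_wait(p, 0);        // returns once *p != 0
    }

    void release(std::atomic<int>* p) {
        p->store(1);
        std::atomic_notify_all(p);
    }
*/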
2171
2172// atomic_fetch_add
2173
2174template <class _Tp>
2175_LIBCPP_INLINE_VISIBILITY
2176typename enable_if
2177<
2178    is_integral<_Tp>::value && !is_same<_Tp, bool>::value && !is_const<_Tp>::value,
2179    _Tp
2180>::type
2181atomic_fetch_add(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
2182{
2183    return __o->fetch_add(__op);
2184}
2185
2186template <class _Tp>
2187_LIBCPP_INLINE_VISIBILITY
2188typename enable_if
2189<
2190    is_integral<_Tp>::value && !is_same<_Tp, bool>::value && !is_const<_Tp>::value,
2191    _Tp
2192>::type
2193atomic_fetch_add(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
2194{
2195    return __o->fetch_add(__op);
2196}
2197
2198template <class _Tp>
2199_LIBCPP_INLINE_VISIBILITY
2200_Tp*
2201atomic_fetch_add(volatile atomic<_Tp*>* __o, typename atomic<_Tp*>::difference_type __op) _NOEXCEPT
2202{
2203    return __o->fetch_add(__op);
2204}
2205
2206template <class _Tp>
2207_LIBCPP_INLINE_VISIBILITY
2208_Tp*
2209atomic_fetch_add(atomic<_Tp*>* __o, typename atomic<_Tp*>::difference_type __op) _NOEXCEPT
2210{
2211    return __o->fetch_add(__op);
2212}
2213
2214// atomic_fetch_add_explicit
2215
2216template <class _Tp>
2217_LIBCPP_INLINE_VISIBILITY
2218typename enable_if
2219<
2220    is_integral<_Tp>::value && !is_same<_Tp, bool>::value && !is_const<_Tp>::value,
2221    _Tp
2222>::type
2223atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
2224{
2225    return __o->fetch_add(__op, __m);
2226}
2227
2228template <class _Tp>
2229_LIBCPP_INLINE_VISIBILITY
2230typename enable_if
2231<
2232    is_integral<_Tp>::value && !is_same<_Tp, bool>::value && !is_const<_Tp>::value,
2233    _Tp
2234>::type
2235atomic_fetch_add_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
2236{
2237    return __o->fetch_add(__op, __m);
2238}
2239
2240template <class _Tp>
2241_LIBCPP_INLINE_VISIBILITY
2242_Tp*
2243atomic_fetch_add_explicit(volatile atomic<_Tp*>* __o, typename atomic<_Tp*>::difference_type __op, memory_order __m) _NOEXCEPT
2244{
2245    return __o->fetch_add(__op, __m);
2246}
2247
2248template <class _Tp>
2249_LIBCPP_INLINE_VISIBILITY
2250_Tp*
2251atomic_fetch_add_explicit(atomic<_Tp*>* __o, typename atomic<_Tp*>::difference_type __op, memory_order __m) _NOEXCEPT
2252{
2253    return __o->fetch_add(__op, __m);
2254}
2255
2256// atomic_fetch_sub
2257
2258template <class _Tp>
2259_LIBCPP_INLINE_VISIBILITY
2260typename enable_if
2261<
2262    is_integral<_Tp>::value && !is_same<_Tp, bool>::value && !is_const<_Tp>::value,
2263    _Tp
2264>::type
2265atomic_fetch_sub(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
2266{
2267    return __o->fetch_sub(__op);
2268}
2269
2270template <class _Tp>
2271_LIBCPP_INLINE_VISIBILITY
2272typename enable_if
2273<
2274    is_integral<_Tp>::value && !is_same<_Tp, bool>::value && !is_const<_Tp>::value,
2275    _Tp
2276>::type
2277atomic_fetch_sub(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
2278{
2279    return __o->fetch_sub(__op);
2280}
2281
2282template <class _Tp>
2283_LIBCPP_INLINE_VISIBILITY
2284_Tp*
2285atomic_fetch_sub(volatile atomic<_Tp*>* __o, typename atomic<_Tp*>::difference_type __op) _NOEXCEPT
2286{
2287    return __o->fetch_sub(__op);
2288}
2289
2290template <class _Tp>
2291_LIBCPP_INLINE_VISIBILITY
2292_Tp*
2293atomic_fetch_sub(atomic<_Tp*>* __o, typename atomic<_Tp*>::difference_type __op) _NOEXCEPT
2294{
2295    return __o->fetch_sub(__op);
2296}
2297
2298// atomic_fetch_sub_explicit
2299
2300template <class _Tp>
2301_LIBCPP_INLINE_VISIBILITY
2302typename enable_if
2303<
2304    is_integral<_Tp>::value && !is_same<_Tp, bool>::value && !is_const<_Tp>::value,
2305    _Tp
2306>::type
2307atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
2308{
2309    return __o->fetch_sub(__op, __m);
2310}
2311
2312template <class _Tp>
2313_LIBCPP_INLINE_VISIBILITY
2314typename enable_if
2315<
2316    is_integral<_Tp>::value && !is_same<_Tp, bool>::value && !is_const<_Tp>::value,
2317    _Tp
2318>::type
2319atomic_fetch_sub_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
2320{
2321    return __o->fetch_sub(__op, __m);
2322}
2323
2324template <class _Tp>
2325_LIBCPP_INLINE_VISIBILITY
2326_Tp*
2327atomic_fetch_sub_explicit(volatile atomic<_Tp*>* __o, typename atomic<_Tp*>::difference_type __op, memory_order __m) _NOEXCEPT
2328{
2329    return __o->fetch_sub(__op, __m);
2330}
2331
2332template <class _Tp>
2333_LIBCPP_INLINE_VISIBILITY
2334_Tp*
2335atomic_fetch_sub_explicit(atomic<_Tp*>* __o, typename atomic<_Tp*>::difference_type __op, memory_order __m) _NOEXCEPT
2336{
2337    return __o->fetch_sub(__op, __m);
2338}
2339
2340// atomic_fetch_and
2341
2342template <class _Tp>
2343_LIBCPP_INLINE_VISIBILITY
2344typename enable_if
2345<
2346    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2347    _Tp
2348>::type
2349atomic_fetch_and(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
2350{
2351    return __o->fetch_and(__op);
2352}
2353
2354template <class _Tp>
2355_LIBCPP_INLINE_VISIBILITY
2356typename enable_if
2357<
2358    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2359    _Tp
2360>::type
2361atomic_fetch_and(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
2362{
2363    return __o->fetch_and(__op);
2364}
2365
2366// atomic_fetch_and_explicit
2367
2368template <class _Tp>
2369_LIBCPP_INLINE_VISIBILITY
2370typename enable_if
2371<
2372    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2373    _Tp
2374>::type
2375atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
2376{
2377    return __o->fetch_and(__op, __m);
2378}
2379
2380template <class _Tp>
2381_LIBCPP_INLINE_VISIBILITY
2382typename enable_if
2383<
2384    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2385    _Tp
2386>::type
2387atomic_fetch_and_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
2388{
2389    return __o->fetch_and(__op, __m);
2390}
2391
2392// atomic_fetch_or
2393
2394template <class _Tp>
2395_LIBCPP_INLINE_VISIBILITY
2396typename enable_if
2397<
2398    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2399    _Tp
2400>::type
2401atomic_fetch_or(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
2402{
2403    return __o->fetch_or(__op);
2404}
2405
2406template <class _Tp>
2407_LIBCPP_INLINE_VISIBILITY
2408typename enable_if
2409<
2410    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2411    _Tp
2412>::type
2413atomic_fetch_or(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
2414{
2415    return __o->fetch_or(__op);
2416}
2417
2418// atomic_fetch_or_explicit
2419
2420template <class _Tp>
2421_LIBCPP_INLINE_VISIBILITY
2422typename enable_if
2423<
2424    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2425    _Tp
2426>::type
2427atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
2428{
2429    return __o->fetch_or(__op, __m);
2430}
2431
2432template <class _Tp>
2433_LIBCPP_INLINE_VISIBILITY
2434typename enable_if
2435<
2436    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2437    _Tp
2438>::type
2439atomic_fetch_or_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
2440{
2441    return __o->fetch_or(__op, __m);
2442}
2443
2444// atomic_fetch_xor
2445
2446template <class _Tp>
2447_LIBCPP_INLINE_VISIBILITY
2448typename enable_if
2449<
2450    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2451    _Tp
2452>::type
2453atomic_fetch_xor(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
2454{
2455    return __o->fetch_xor(__op);
2456}
2457
2458template <class _Tp>
2459_LIBCPP_INLINE_VISIBILITY
2460typename enable_if
2461<
2462    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2463    _Tp
2464>::type
2465atomic_fetch_xor(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
2466{
2467    return __o->fetch_xor(__op);
2468}
2469
2470// atomic_fetch_xor_explicit
2471
2472template <class _Tp>
2473_LIBCPP_INLINE_VISIBILITY
2474typename enable_if
2475<
2476    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2477    _Tp
2478>::type
2479atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
2480{
2481    return __o->fetch_xor(__op, __m);
2482}
2483
2484template <class _Tp>
2485_LIBCPP_INLINE_VISIBILITY
2486typename enable_if
2487<
2488    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2489    _Tp
2490>::type
2491atomic_fetch_xor_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
2492{
2493    return __o->fetch_xor(__op, __m);
2494}
2495
2496// flag type and operations
2497
2498typedef struct atomic_flag
2499{
2500    __cxx_atomic_impl<_LIBCPP_ATOMIC_FLAG_TYPE> __a_;
2501
2502    _LIBCPP_INLINE_VISIBILITY
2503    bool test(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
2504        {return _LIBCPP_ATOMIC_FLAG_TYPE(true) == __cxx_atomic_load(&__a_, __m);}
2505    _LIBCPP_INLINE_VISIBILITY
2506    bool test(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
2507        {return _LIBCPP_ATOMIC_FLAG_TYPE(true) == __cxx_atomic_load(&__a_, __m);}
2508
2509    _LIBCPP_INLINE_VISIBILITY
2510    bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
2511        {return __cxx_atomic_exchange(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), __m);}
2512    _LIBCPP_INLINE_VISIBILITY
2513    bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
2514        {return __cxx_atomic_exchange(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), __m);}
2515    _LIBCPP_INLINE_VISIBILITY
2516    void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
2517        {__cxx_atomic_store(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), __m);}
2518    _LIBCPP_INLINE_VISIBILITY
2519    void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
2520        {__cxx_atomic_store(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), __m);}
2521
2522    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2523    void wait(bool __v, memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
2524        {__cxx_atomic_wait(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(__v), __m);}
2525    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2526    void wait(bool __v, memory_order __m = memory_order_seq_cst) const _NOEXCEPT
2527        {__cxx_atomic_wait(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(__v), __m);}
2528    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2529    void notify_one() volatile _NOEXCEPT
2530        {__cxx_atomic_notify_one(&__a_);}
2531    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2532    void notify_one() _NOEXCEPT
2533        {__cxx_atomic_notify_one(&__a_);}
2534    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2535    void notify_all() volatile _NOEXCEPT
2536        {__cxx_atomic_notify_all(&__a_);}
2537    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
2538    void notify_all() _NOEXCEPT
2539        {__cxx_atomic_notify_all(&__a_);}
2540
2541    _LIBCPP_INLINE_VISIBILITY
2542    atomic_flag() _NOEXCEPT _LIBCPP_DEFAULT
2543
2544    _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR
2545    atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {} // EXTENSION
2546
2547#ifndef _LIBCPP_CXX03_LANG
2548    atomic_flag(const atomic_flag&) = delete;
2549    atomic_flag& operator=(const atomic_flag&) = delete;
2550    atomic_flag& operator=(const atomic_flag&) volatile = delete;
2551#else
2552private:
2553    _LIBCPP_INLINE_VISIBILITY
2554    atomic_flag(const atomic_flag&);
2555    _LIBCPP_INLINE_VISIBILITY
2556    atomic_flag& operator=(const atomic_flag&);
2557    _LIBCPP_INLINE_VISIBILITY
2558    atomic_flag& operator=(const atomic_flag&) volatile;
2559#endif
2560} atomic_flag;
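
// Illustrative sketch of atomic_flag in use (hypothetical `SpinLock`, not part
// of the header): a minimal test-and-set spinlock.
/*
    #include <atomic>

    class SpinLock {
        std::atomic_flag flag_ = ATOMIC_FLAG_INIT;
    public:
        void lock()   { while (flag_.test_and_set(std::memory_order_acquire)) { } } // spin
        void unlock() { flag_.clear(std::memory_order_release); }
    };
*/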
2561
2562
2563inline _LIBCPP_INLINE_VISIBILITY
2564bool
2565atomic_flag_test(const volatile atomic_flag* __o) _NOEXCEPT
2566{
2567    return __o->test();
2568}
2569
2570inline _LIBCPP_INLINE_VISIBILITY
2571bool
2572atomic_flag_test(const atomic_flag* __o) _NOEXCEPT
2573{
2574    return __o->test();
2575}
2576
2577inline _LIBCPP_INLINE_VISIBILITY
2578bool
2579atomic_flag_test_explicit(const volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
2580{
2581    return __o->test(__m);
2582}
2583
2584inline _LIBCPP_INLINE_VISIBILITY
2585bool
2586atomic_flag_test_explicit(const atomic_flag* __o, memory_order __m) _NOEXCEPT
2587{
2588    return __o->test(__m);
2589}
2590
2591inline _LIBCPP_INLINE_VISIBILITY
2592bool
2593atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
2594{
2595    return __o->test_and_set();
2596}
2597
2598inline _LIBCPP_INLINE_VISIBILITY
2599bool
2600atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
2601{
2602    return __o->test_and_set();
2603}
2604
2605inline _LIBCPP_INLINE_VISIBILITY
2606bool
2607atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
2608{
2609    return __o->test_and_set(__m);
2610}
2611
2612inline _LIBCPP_INLINE_VISIBILITY
2613bool
2614atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
2615{
2616    return __o->test_and_set(__m);
2617}
2618
2619inline _LIBCPP_INLINE_VISIBILITY
2620void
2621atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
2622{
2623    __o->clear();
2624}
2625
2626inline _LIBCPP_INLINE_VISIBILITY
2627void
2628atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
2629{
2630    __o->clear();
2631}
2632
2633inline _LIBCPP_INLINE_VISIBILITY
2634void
2635atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
2636{
2637    __o->clear(__m);
2638}
2639
2640inline _LIBCPP_INLINE_VISIBILITY
2641void
2642atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
2643{
2644    __o->clear(__m);
2645}
2646
2647inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2648void
2649atomic_flag_wait(const volatile atomic_flag* __o, bool __v) _NOEXCEPT
2650{
2651    __o->wait(__v);
2652}
2653
2654inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2655void
2656atomic_flag_wait(const atomic_flag* __o, bool __v) _NOEXCEPT
2657{
2658    __o->wait(__v);
2659}
2660
2661inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2662void
2663atomic_flag_wait_explicit(const volatile atomic_flag* __o,
2664                          bool __v, memory_order __m) _NOEXCEPT
2665{
2666    __o->wait(__v, __m);
2667}
2668
2669inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2670void
2671atomic_flag_wait_explicit(const atomic_flag* __o,
2672                          bool __v, memory_order __m) _NOEXCEPT
2673{
2674    __o->wait(__v, __m);
2675}
2676
2677inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2678void
2679atomic_flag_notify_one(volatile atomic_flag* __o) _NOEXCEPT
2680{
2681    __o->notify_one();
2682}
2683
2684inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2685void
2686atomic_flag_notify_one(atomic_flag* __o) _NOEXCEPT
2687{
2688    __o->notify_one();
2689}
2690
2691inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2692void
2693atomic_flag_notify_all(volatile atomic_flag* __o) _NOEXCEPT
2694{
2695    __o->notify_all();
2696}
2697
2698inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2699void
2700atomic_flag_notify_all(atomic_flag* __o) _NOEXCEPT
2701{
2702    __o->notify_all();
2703}
2704
2705// fences
2706
2707inline _LIBCPP_INLINE_VISIBILITY
2708void
2709atomic_thread_fence(memory_order __m) _NOEXCEPT
2710{
2711    __cxx_atomic_thread_fence(__m);
2712}
2713
2714inline _LIBCPP_INLINE_VISIBILITY
2715void
2716atomic_signal_fence(memory_order __m) _NOEXCEPT
2717{
2718    __cxx_atomic_signal_fence(__m);
2719}
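
// Illustrative sketch of fence-based synchronization (hypothetical `producer` /
// `consumer`): a release fence before a relaxed store pairs with an acquire
// fence after a relaxed load.
/*
    #include <atomic>

    std::atomic<int> data(0);
    std::atomic<int> ready(0);

    void producer() {
        data.store(42, std::memory_order_relaxed);
        std::atomic_thread_fence(std::memory_order_release);
        ready.store(1, std::memory_order_relaxed);
    }

    int consumer() {
        while (ready.load(std::memory_order_relaxed) == 0) { }  // spin
        std::atomic_thread_fence(std::memory_order_acquire);
        return data.load(std::memory_order_relaxed);            // observes 42
    }
*/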
2720
2721// Atomics for standard typedef types
2722
2723typedef atomic<bool>               atomic_bool;
2724typedef atomic<char>               atomic_char;
2725typedef atomic<signed char>        atomic_schar;
2726typedef atomic<unsigned char>      atomic_uchar;
2727typedef atomic<short>              atomic_short;
2728typedef atomic<unsigned short>     atomic_ushort;
2729typedef atomic<int>                atomic_int;
2730typedef atomic<unsigned int>       atomic_uint;
2731typedef atomic<long>               atomic_long;
2732typedef atomic<unsigned long>      atomic_ulong;
2733typedef atomic<long long>          atomic_llong;
2734typedef atomic<unsigned long long> atomic_ullong;
2735#ifndef _LIBCPP_NO_HAS_CHAR8_T
2736typedef atomic<char8_t>            atomic_char8_t;
2737#endif
2738typedef atomic<char16_t>           atomic_char16_t;
2739typedef atomic<char32_t>           atomic_char32_t;
2740typedef atomic<wchar_t>            atomic_wchar_t;
2741
2742typedef atomic<int_least8_t>   atomic_int_least8_t;
2743typedef atomic<uint_least8_t>  atomic_uint_least8_t;
2744typedef atomic<int_least16_t>  atomic_int_least16_t;
2745typedef atomic<uint_least16_t> atomic_uint_least16_t;
2746typedef atomic<int_least32_t>  atomic_int_least32_t;
2747typedef atomic<uint_least32_t> atomic_uint_least32_t;
2748typedef atomic<int_least64_t>  atomic_int_least64_t;
2749typedef atomic<uint_least64_t> atomic_uint_least64_t;
2750
2751typedef atomic<int_fast8_t>   atomic_int_fast8_t;
2752typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
2753typedef atomic<int_fast16_t>  atomic_int_fast16_t;
2754typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
2755typedef atomic<int_fast32_t>  atomic_int_fast32_t;
2756typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
2757typedef atomic<int_fast64_t>  atomic_int_fast64_t;
2758typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
2759
2760typedef atomic< int8_t>  atomic_int8_t;
2761typedef atomic<uint8_t>  atomic_uint8_t;
2762typedef atomic< int16_t> atomic_int16_t;
2763typedef atomic<uint16_t> atomic_uint16_t;
2764typedef atomic< int32_t> atomic_int32_t;
2765typedef atomic<uint32_t> atomic_uint32_t;
2766typedef atomic< int64_t> atomic_int64_t;
2767typedef atomic<uint64_t> atomic_uint64_t;
2768
2769typedef atomic<intptr_t>  atomic_intptr_t;
2770typedef atomic<uintptr_t> atomic_uintptr_t;
2771typedef atomic<size_t>    atomic_size_t;
2772typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
2773typedef atomic<intmax_t>  atomic_intmax_t;
2774typedef atomic<uintmax_t> atomic_uintmax_t;
2775
2776// atomic_*_lock_free : prefer the contention type most highly, then the largest lock-free type
2777
2778#ifdef __cpp_lib_atomic_is_always_lock_free
2779# define _LIBCPP_CONTENTION_LOCK_FREE __atomic_always_lock_free(sizeof(__cxx_contention_t), 0)
2780#else
2781# define _LIBCPP_CONTENTION_LOCK_FREE false
2782#endif
2783
2784#if ATOMIC_LLONG_LOCK_FREE == 2
2785typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, long long>::type          __libcpp_signed_lock_free;
2786typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned long long>::type __libcpp_unsigned_lock_free;
2787#elif ATOMIC_INT_LOCK_FREE == 2
2788typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, int>::type                __libcpp_signed_lock_free;
2789typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned int>::type       __libcpp_unsigned_lock_free;
2790#elif ATOMIC_SHORT_LOCK_FREE == 2
2791typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, short>::type              __libcpp_signed_lock_free;
2792typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned short>::type     __libcpp_unsigned_lock_free;
2793#elif ATOMIC_CHAR_LOCK_FREE == 2
2794typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, char>::type               __libcpp_signed_lock_free;
2795typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned char>::type      __libcpp_unsigned_lock_free;
2796#else
2797    // No signed/unsigned lock-free types
2798#endif
2799
2800typedef atomic<__libcpp_signed_lock_free> atomic_signed_lock_free;
2801typedef atomic<__libcpp_unsigned_lock_free> atomic_unsigned_lock_free;
2802
2803#define ATOMIC_FLAG_INIT {false}
2804#define ATOMIC_VAR_INIT(__v) {__v}
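
// Illustrative sketch of the initializer macros above (hypothetical `counter`
// and `guard`; ATOMIC_VAR_INIT is deprecated in C++20, where direct
// initialization is preferred).
/*
    #include <atomic>

    std::atomic<int> counter = ATOMIC_VAR_INIT(0);
    std::atomic_flag guard   = ATOMIC_FLAG_INIT;
*/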
2805
2806_LIBCPP_END_NAMESPACE_STD
2807
2808#endif  // _LIBCPP_ATOMIC
2809