Coverage Report

Created: 2020-02-25 14:32

/Users/buildslave/jenkins/workspace/coverage/llvm-project/libcxx/include/atomic
Line
Count
Source (jump to first uncovered line)
1
// -*- C++ -*-
2
//===--------------------------- atomic -----------------------------------===//
3
//
4
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
5
// See https://llvm.org/LICENSE.txt for license information.
6
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
7
//
8
//===----------------------------------------------------------------------===//
9
10
#ifndef _LIBCPP_ATOMIC
11
#define _LIBCPP_ATOMIC
12
13
/*
14
    atomic synopsis
15
16
namespace std
17
{
18
19
// feature test macro
20
21
#define __cpp_lib_atomic_is_always_lock_free // as specified by SG10
22
23
 // order and consistency
24
25
 enum memory_order: unspecified // enum class in C++20
26
 {
27
    relaxed,
28
    consume, // load-consume
29
    acquire, // load-acquire
30
    release, // store-release
31
    acq_rel, // store-release load-acquire
32
    seq_cst // store-release load-acquire
33
 };
34
35
 inline constexpr auto memory_order_relaxed = memory_order::relaxed;
36
 inline constexpr auto memory_order_consume = memory_order::consume;
37
 inline constexpr auto memory_order_acquire = memory_order::acquire;
38
 inline constexpr auto memory_order_release = memory_order::release;
39
 inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
40
 inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;
41
42
template <class T> T kill_dependency(T y) noexcept;
43
44
// lock-free property
45
46
#define ATOMIC_BOOL_LOCK_FREE unspecified
47
#define ATOMIC_CHAR_LOCK_FREE unspecified
48
#define ATOMIC_CHAR16_T_LOCK_FREE unspecified
49
#define ATOMIC_CHAR32_T_LOCK_FREE unspecified
50
#define ATOMIC_WCHAR_T_LOCK_FREE unspecified
51
#define ATOMIC_SHORT_LOCK_FREE unspecified
52
#define ATOMIC_INT_LOCK_FREE unspecified
53
#define ATOMIC_LONG_LOCK_FREE unspecified
54
#define ATOMIC_LLONG_LOCK_FREE unspecified
55
#define ATOMIC_POINTER_LOCK_FREE unspecified
56
57
// flag type and operations
58
59
typedef struct atomic_flag
60
{
61
    bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
62
    bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
63
    void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
64
    void clear(memory_order m = memory_order_seq_cst) noexcept;
65
    atomic_flag()  noexcept = default;
66
    atomic_flag(const atomic_flag&) = delete;
67
    atomic_flag& operator=(const atomic_flag&) = delete;
68
    atomic_flag& operator=(const atomic_flag&) volatile = delete;
69
} atomic_flag;
70
71
bool
72
    atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept;
73
74
bool
75
    atomic_flag_test_and_set(atomic_flag* obj) noexcept;
76
77
bool
78
    atomic_flag_test_and_set_explicit(volatile atomic_flag* obj,
79
                                      memory_order m) noexcept;
80
81
bool
82
    atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;
83
84
void
85
    atomic_flag_clear(volatile atomic_flag* obj) noexcept;
86
87
void
88
    atomic_flag_clear(atomic_flag* obj) noexcept;
89
90
void
91
    atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;
92
93
void
94
    atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;
95
96
#define ATOMIC_FLAG_INIT see below
97
#define ATOMIC_VAR_INIT(value) see below
98
99
template <class T>
100
struct atomic
101
{
102
    static constexpr bool is_always_lock_free;
103
    bool is_lock_free() const volatile noexcept;
104
    bool is_lock_free() const noexcept;
105
    void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
106
    void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
107
    T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
108
    T load(memory_order m = memory_order_seq_cst) const noexcept;
109
    operator T() const volatile noexcept;
110
    operator T() const noexcept;
111
    T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
112
    T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
113
    bool compare_exchange_weak(T& expc, T desr,
114
                               memory_order s, memory_order f) volatile noexcept;
115
    bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
116
    bool compare_exchange_strong(T& expc, T desr,
117
                                 memory_order s, memory_order f) volatile noexcept;
118
    bool compare_exchange_strong(T& expc, T desr,
119
                                 memory_order s, memory_order f) noexcept;
120
    bool compare_exchange_weak(T& expc, T desr,
121
                               memory_order m = memory_order_seq_cst) volatile noexcept;
122
    bool compare_exchange_weak(T& expc, T desr,
123
                               memory_order m = memory_order_seq_cst) noexcept;
124
    bool compare_exchange_strong(T& expc, T desr,
125
                                memory_order m = memory_order_seq_cst) volatile noexcept;
126
    bool compare_exchange_strong(T& expc, T desr,
127
                                 memory_order m = memory_order_seq_cst) noexcept;
128
129
    atomic() noexcept = default;
130
    constexpr atomic(T desr) noexcept;
131
    atomic(const atomic&) = delete;
132
    atomic& operator=(const atomic&) = delete;
133
    atomic& operator=(const atomic&) volatile = delete;
134
    T operator=(T) volatile noexcept;
135
    T operator=(T) noexcept;
136
};
137
138
template <>
139
struct atomic<integral>
140
{
141
    static constexpr bool is_always_lock_free;
142
    bool is_lock_free() const volatile noexcept;
143
    bool is_lock_free() const noexcept;
144
    void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
145
    void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
146
    integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
147
    integral load(memory_order m = memory_order_seq_cst) const noexcept;
148
    operator integral() const volatile noexcept;
149
    operator integral() const noexcept;
150
    integral exchange(integral desr,
151
                      memory_order m = memory_order_seq_cst) volatile noexcept;
152
    integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
153
    bool compare_exchange_weak(integral& expc, integral desr,
154
                               memory_order s, memory_order f) volatile noexcept;
155
    bool compare_exchange_weak(integral& expc, integral desr,
156
                               memory_order s, memory_order f) noexcept;
157
    bool compare_exchange_strong(integral& expc, integral desr,
158
                                 memory_order s, memory_order f) volatile noexcept;
159
    bool compare_exchange_strong(integral& expc, integral desr,
160
                                 memory_order s, memory_order f) noexcept;
161
    bool compare_exchange_weak(integral& expc, integral desr,
162
                               memory_order m = memory_order_seq_cst) volatile noexcept;
163
    bool compare_exchange_weak(integral& expc, integral desr,
164
                               memory_order m = memory_order_seq_cst) noexcept;
165
    bool compare_exchange_strong(integral& expc, integral desr,
166
                                memory_order m = memory_order_seq_cst) volatile noexcept;
167
    bool compare_exchange_strong(integral& expc, integral desr,
168
                                 memory_order m = memory_order_seq_cst) noexcept;
169
170
    integral
171
        fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
172
    integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
173
    integral
174
        fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
175
    integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
176
    integral
177
        fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
178
    integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
179
    integral
180
        fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
181
    integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
182
    integral
183
        fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
184
    integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;
185
186
    atomic() noexcept = default;
187
    constexpr atomic(integral desr) noexcept;
188
    atomic(const atomic&) = delete;
189
    atomic& operator=(const atomic&) = delete;
190
    atomic& operator=(const atomic&) volatile = delete;
191
    integral operator=(integral desr) volatile noexcept;
192
    integral operator=(integral desr) noexcept;
193
194
    integral operator++(int) volatile noexcept;
195
    integral operator++(int) noexcept;
196
    integral operator--(int) volatile noexcept;
197
    integral operator--(int) noexcept;
198
    integral operator++() volatile noexcept;
199
    integral operator++() noexcept;
200
    integral operator--() volatile noexcept;
201
    integral operator--() noexcept;
202
    integral operator+=(integral op) volatile noexcept;
203
    integral operator+=(integral op) noexcept;
204
    integral operator-=(integral op) volatile noexcept;
205
    integral operator-=(integral op) noexcept;
206
    integral operator&=(integral op) volatile noexcept;
207
    integral operator&=(integral op) noexcept;
208
    integral operator|=(integral op) volatile noexcept;
209
    integral operator|=(integral op) noexcept;
210
    integral operator^=(integral op) volatile noexcept;
211
    integral operator^=(integral op) noexcept;
212
};
213
214
template <class T>
215
struct atomic<T*>
216
{
217
    static constexpr bool is_always_lock_free;
218
    bool is_lock_free() const volatile noexcept;
219
    bool is_lock_free() const noexcept;
220
    void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
221
    void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
222
    T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
223
    T* load(memory_order m = memory_order_seq_cst) const noexcept;
224
    operator T*() const volatile noexcept;
225
    operator T*() const noexcept;
226
    T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
227
    T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
228
    bool compare_exchange_weak(T*& expc, T* desr,
229
                               memory_order s, memory_order f) volatile noexcept;
230
    bool compare_exchange_weak(T*& expc, T* desr,
231
                               memory_order s, memory_order f) noexcept;
232
    bool compare_exchange_strong(T*& expc, T* desr,
233
                                 memory_order s, memory_order f) volatile noexcept;
234
    bool compare_exchange_strong(T*& expc, T* desr,
235
                                 memory_order s, memory_order f) noexcept;
236
    bool compare_exchange_weak(T*& expc, T* desr,
237
                               memory_order m = memory_order_seq_cst) volatile noexcept;
238
    bool compare_exchange_weak(T*& expc, T* desr,
239
                               memory_order m = memory_order_seq_cst) noexcept;
240
    bool compare_exchange_strong(T*& expc, T* desr,
241
                                memory_order m = memory_order_seq_cst) volatile noexcept;
242
    bool compare_exchange_strong(T*& expc, T* desr,
243
                                 memory_order m = memory_order_seq_cst) noexcept;
244
    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
245
    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
246
    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
247
    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
248
249
    atomic() noexcept = default;
250
    constexpr atomic(T* desr) noexcept;
251
    atomic(const atomic&) = delete;
252
    atomic& operator=(const atomic&) = delete;
253
    atomic& operator=(const atomic&) volatile = delete;
254
255
    T* operator=(T*) volatile noexcept;
256
    T* operator=(T*) noexcept;
257
    T* operator++(int) volatile noexcept;
258
    T* operator++(int) noexcept;
259
    T* operator--(int) volatile noexcept;
260
    T* operator--(int) noexcept;
261
    T* operator++() volatile noexcept;
262
    T* operator++() noexcept;
263
    T* operator--() volatile noexcept;
264
    T* operator--() noexcept;
265
    T* operator+=(ptrdiff_t op) volatile noexcept;
266
    T* operator+=(ptrdiff_t op) noexcept;
267
    T* operator-=(ptrdiff_t op) volatile noexcept;
268
    T* operator-=(ptrdiff_t op) noexcept;
269
};
270
271
272
template <class T>
273
    bool
274
    atomic_is_lock_free(const volatile atomic<T>* obj) noexcept;
275
276
template <class T>
277
    bool
278
    atomic_is_lock_free(const atomic<T>* obj) noexcept;
279
280
template <class T>
281
    void
282
    atomic_init(volatile atomic<T>* obj, T desr) noexcept;
283
284
template <class T>
285
    void
286
    atomic_init(atomic<T>* obj, T desr) noexcept;
287
288
template <class T>
289
    void
290
    atomic_store(volatile atomic<T>* obj, T desr) noexcept;
291
292
template <class T>
293
    void
294
    atomic_store(atomic<T>* obj, T desr) noexcept;
295
296
template <class T>
297
    void
298
    atomic_store_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
299
300
template <class T>
301
    void
302
    atomic_store_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
303
304
template <class T>
305
    T
306
    atomic_load(const volatile atomic<T>* obj) noexcept;
307
308
template <class T>
309
    T
310
    atomic_load(const atomic<T>* obj) noexcept;
311
312
template <class T>
313
    T
314
    atomic_load_explicit(const volatile atomic<T>* obj, memory_order m) noexcept;
315
316
template <class T>
317
    T
318
    atomic_load_explicit(const atomic<T>* obj, memory_order m) noexcept;
319
320
template <class T>
321
    T
322
    atomic_exchange(volatile atomic<T>* obj, T desr) noexcept;
323
324
template <class T>
325
    T
326
    atomic_exchange(atomic<T>* obj, T desr) noexcept;
327
328
template <class T>
329
    T
330
    atomic_exchange_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
331
332
template <class T>
333
    T
334
    atomic_exchange_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
335
336
template <class T>
337
    bool
338
    atomic_compare_exchange_weak(volatile atomic<T>* obj, T* expc, T desr) noexcept;
339
340
template <class T>
341
    bool
342
    atomic_compare_exchange_weak(atomic<T>* obj, T* expc, T desr) noexcept;
343
344
template <class T>
345
    bool
346
    atomic_compare_exchange_strong(volatile atomic<T>* obj, T* expc, T desr) noexcept;
347
348
template <class T>
349
    bool
350
    atomic_compare_exchange_strong(atomic<T>* obj, T* expc, T desr) noexcept;
351
352
template <class T>
353
    bool
354
    atomic_compare_exchange_weak_explicit(volatile atomic<T>* obj, T* expc,
355
                                          T desr,
356
                                          memory_order s, memory_order f) noexcept;
357
358
template <class T>
359
    bool
360
    atomic_compare_exchange_weak_explicit(atomic<T>* obj, T* expc, T desr,
361
                                          memory_order s, memory_order f) noexcept;
362
363
template <class T>
364
    bool
365
    atomic_compare_exchange_strong_explicit(volatile atomic<T>* obj,
366
                                            T* expc, T desr,
367
                                            memory_order s, memory_order f) noexcept;
368
369
template <class T>
370
    bool
371
    atomic_compare_exchange_strong_explicit(atomic<T>* obj, T* expc,
372
                                            T desr,
373
                                            memory_order s, memory_order f) noexcept;
374
375
template <class Integral>
376
    Integral
377
    atomic_fetch_add(volatile atomic<Integral>* obj, Integral op) noexcept;
378
379
template <class Integral>
380
    Integral
381
    atomic_fetch_add(atomic<Integral>* obj, Integral op) noexcept;
382
383
template <class Integral>
384
    Integral
385
    atomic_fetch_add_explicit(volatile atomic<Integral>* obj, Integral op,
386
                              memory_order m) noexcept;
387
template <class Integral>
388
    Integral
389
    atomic_fetch_add_explicit(atomic<Integral>* obj, Integral op,
390
                              memory_order m) noexcept;
391
template <class Integral>
392
    Integral
393
    atomic_fetch_sub(volatile atomic<Integral>* obj, Integral op) noexcept;
394
395
template <class Integral>
396
    Integral
397
    atomic_fetch_sub(atomic<Integral>* obj, Integral op) noexcept;
398
399
template <class Integral>
400
    Integral
401
    atomic_fetch_sub_explicit(volatile atomic<Integral>* obj, Integral op,
402
                              memory_order m) noexcept;
403
template <class Integral>
404
    Integral
405
    atomic_fetch_sub_explicit(atomic<Integral>* obj, Integral op,
406
                              memory_order m) noexcept;
407
template <class Integral>
408
    Integral
409
    atomic_fetch_and(volatile atomic<Integral>* obj, Integral op) noexcept;
410
411
template <class Integral>
412
    Integral
413
    atomic_fetch_and(atomic<Integral>* obj, Integral op) noexcept;
414
415
template <class Integral>
416
    Integral
417
    atomic_fetch_and_explicit(volatile atomic<Integral>* obj, Integral op,
418
                              memory_order m) noexcept;
419
template <class Integral>
420
    Integral
421
    atomic_fetch_and_explicit(atomic<Integral>* obj, Integral op,
422
                              memory_order m) noexcept;
423
template <class Integral>
424
    Integral
425
    atomic_fetch_or(volatile atomic<Integral>* obj, Integral op) noexcept;
426
427
template <class Integral>
428
    Integral
429
    atomic_fetch_or(atomic<Integral>* obj, Integral op) noexcept;
430
431
template <class Integral>
432
    Integral
433
    atomic_fetch_or_explicit(volatile atomic<Integral>* obj, Integral op,
434
                             memory_order m) noexcept;
435
template <class Integral>
436
    Integral
437
    atomic_fetch_or_explicit(atomic<Integral>* obj, Integral op,
438
                             memory_order m) noexcept;
439
template <class Integral>
440
    Integral
441
    atomic_fetch_xor(volatile atomic<Integral>* obj, Integral op) noexcept;
442
443
template <class Integral>
444
    Integral
445
    atomic_fetch_xor(atomic<Integral>* obj, Integral op) noexcept;
446
447
template <class Integral>
448
    Integral
449
    atomic_fetch_xor_explicit(volatile atomic<Integral>* obj, Integral op,
450
                              memory_order m) noexcept;
451
template <class Integral>
452
    Integral
453
    atomic_fetch_xor_explicit(atomic<Integral>* obj, Integral op,
454
                              memory_order m) noexcept;
455
456
template <class T>
457
    T*
458
    atomic_fetch_add(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
459
460
template <class T>
461
    T*
462
    atomic_fetch_add(atomic<T*>* obj, ptrdiff_t op) noexcept;
463
464
template <class T>
465
    T*
466
    atomic_fetch_add_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
467
                              memory_order m) noexcept;
468
template <class T>
469
    T*
470
    atomic_fetch_add_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
471
472
template <class T>
473
    T*
474
    atomic_fetch_sub(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
475
476
template <class T>
477
    T*
478
    atomic_fetch_sub(atomic<T*>* obj, ptrdiff_t op) noexcept;
479
480
template <class T>
481
    T*
482
    atomic_fetch_sub_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
483
                              memory_order m) noexcept;
484
template <class T>
485
    T*
486
    atomic_fetch_sub_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
487
488
// Atomics for standard typedef types
489
490
typedef atomic<bool>               atomic_bool;
491
typedef atomic<char>               atomic_char;
492
typedef atomic<signed char>        atomic_schar;
493
typedef atomic<unsigned char>      atomic_uchar;
494
typedef atomic<short>              atomic_short;
495
typedef atomic<unsigned short>     atomic_ushort;
496
typedef atomic<int>                atomic_int;
497
typedef atomic<unsigned int>       atomic_uint;
498
typedef atomic<long>               atomic_long;
499
typedef atomic<unsigned long>      atomic_ulong;
500
typedef atomic<long long>          atomic_llong;
501
typedef atomic<unsigned long long> atomic_ullong;
502
typedef atomic<char16_t>           atomic_char16_t;
503
typedef atomic<char32_t>           atomic_char32_t;
504
typedef atomic<wchar_t>            atomic_wchar_t;
505
506
typedef atomic<int_least8_t>   atomic_int_least8_t;
507
typedef atomic<uint_least8_t>  atomic_uint_least8_t;
508
typedef atomic<int_least16_t>  atomic_int_least16_t;
509
typedef atomic<uint_least16_t> atomic_uint_least16_t;
510
typedef atomic<int_least32_t>  atomic_int_least32_t;
511
typedef atomic<uint_least32_t> atomic_uint_least32_t;
512
typedef atomic<int_least64_t>  atomic_int_least64_t;
513
typedef atomic<uint_least64_t> atomic_uint_least64_t;
514
515
typedef atomic<int_fast8_t>   atomic_int_fast8_t;
516
typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
517
typedef atomic<int_fast16_t>  atomic_int_fast16_t;
518
typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
519
typedef atomic<int_fast32_t>  atomic_int_fast32_t;
520
typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
521
typedef atomic<int_fast64_t>  atomic_int_fast64_t;
522
typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
523
524
typedef atomic<int8_t>   atomic_int8_t;
525
typedef atomic<uint8_t>  atomic_uint8_t;
526
typedef atomic<int16_t>  atomic_int16_t;
527
typedef atomic<uint16_t> atomic_uint16_t;
528
typedef atomic<int32_t>  atomic_int32_t;
529
typedef atomic<uint32_t> atomic_uint32_t;
530
typedef atomic<int64_t>  atomic_int64_t;
531
typedef atomic<uint64_t> atomic_uint64_t;
532
533
typedef atomic<intptr_t>  atomic_intptr_t;
534
typedef atomic<uintptr_t> atomic_uintptr_t;
535
typedef atomic<size_t>    atomic_size_t;
536
typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
537
typedef atomic<intmax_t>  atomic_intmax_t;
538
typedef atomic<uintmax_t> atomic_uintmax_t;
539
540
// fences
541
542
void atomic_thread_fence(memory_order m) noexcept;
543
void atomic_signal_fence(memory_order m) noexcept;
544
545
}  // std
546
547
*/
548
549
#include <__config>
550
#include <__threading_support>
551
#include <cstddef>
552
#include <cstdint>
553
#include <cstring>
554
#include <type_traits>
555
#include <version>
556
557
#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
558
#pragma GCC system_header
559
#endif
560
561
#ifdef _LIBCPP_HAS_NO_THREADS
562
# error <atomic> is not supported on this single threaded system
563
#endif
564
#ifdef _LIBCPP_HAS_NO_ATOMIC_HEADER
565
# error <atomic> is not implemented
566
#endif
567
#ifdef kill_dependency
568
# error C++ standard library is incompatible with <stdatomic.h>
569
#endif
570
571
#define _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) \
572
  _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_consume || \
573
                           __m == memory_order_acquire || \
574
                           __m == memory_order_acq_rel,   \
575
                        "memory order argument to atomic operation is invalid")
576
577
#define _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) \
578
  _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_release || \
579
                           __m == memory_order_acq_rel,   \
580
                        "memory order argument to atomic operation is invalid")
581
582
#define _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__m, __f) \
583
  _LIBCPP_DIAGNOSE_WARNING(__f == memory_order_release || \
584
                           __f == memory_order_acq_rel,   \
585
                        "memory order argument to atomic operation is invalid")
586
587
_LIBCPP_BEGIN_NAMESPACE_STD
588
589
// Figure out what the underlying type for `memory_order` would be if it were
590
// declared as an unscoped enum (accounting for -fshort-enums). Use this result
591
// to pin the underlying type in C++20.
592
enum __legacy_memory_order {
593
    __mo_relaxed,
594
    __mo_consume,
595
    __mo_acquire,
596
    __mo_release,
597
    __mo_acq_rel,
598
    __mo_seq_cst
599
};
600
601
typedef underlying_type<__legacy_memory_order>::type __memory_order_underlying_t;
602
603
#if _LIBCPP_STD_VER > 17
604
605
enum class memory_order : __memory_order_underlying_t {
606
  relaxed = __mo_relaxed,
607
  consume = __mo_consume,
608
  acquire = __mo_acquire,
609
  release = __mo_release,
610
  acq_rel = __mo_acq_rel,
611
  seq_cst = __mo_seq_cst
612
};
613
614
inline constexpr auto memory_order_relaxed = memory_order::relaxed;
615
inline constexpr auto memory_order_consume = memory_order::consume;
616
inline constexpr auto memory_order_acquire = memory_order::acquire;
617
inline constexpr auto memory_order_release = memory_order::release;
618
inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
619
inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;
620
621
#else
622
623
typedef enum memory_order {
624
  memory_order_relaxed = __mo_relaxed,
625
  memory_order_consume = __mo_consume,
626
  memory_order_acquire = __mo_acquire,
627
  memory_order_release = __mo_release,
628
  memory_order_acq_rel = __mo_acq_rel,
629
  memory_order_seq_cst = __mo_seq_cst,
630
} memory_order;
631
632
#endif // _LIBCPP_STD_VER > 17
633
634
template <typename _Tp> _LIBCPP_INLINE_VISIBILITY
635
0
bool __cxx_nonatomic_compare_equal(_Tp const& __lhs, _Tp const& __rhs) {
636
0
    return memcmp(&__lhs, &__rhs, sizeof(_Tp)) == 0;
637
0
}
Unexecuted instantiation: bool std::__1::__cxx_nonatomic_compare_equal<bool>(bool const&, bool const&)
Unexecuted instantiation: bool std::__1::__cxx_nonatomic_compare_equal<long long>(long long const&, long long const&)
638
639
static_assert((is_same<underlying_type<memory_order>::type, __memory_order_underlying_t>::value),
640
  "unexpected underlying type for std::memory_order");
641
642
#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP) || \
643
  defined(_LIBCPP_ATOMIC_ONLY_USE_BUILTINS)
644
645
// [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
646
// the default operator= in an object is not volatile, a byte-by-byte copy
647
// is required.
648
template <typename _Tp, typename _Tv> _LIBCPP_INLINE_VISIBILITY
649
typename enable_if<is_assignable<_Tp&, _Tv>::value>::type
650
__cxx_atomic_assign_volatile(_Tp& __a_value, _Tv const& __val) {
651
  __a_value = __val;
652
}
653
template <typename _Tp, typename _Tv> _LIBCPP_INLINE_VISIBILITY
654
typename enable_if<is_assignable<_Tp&, _Tv>::value>::type
655
__cxx_atomic_assign_volatile(_Tp volatile& __a_value, _Tv volatile const& __val) {
656
  volatile char* __to = reinterpret_cast<volatile char*>(&__a_value);
657
  volatile char* __end = __to + sizeof(_Tp);
658
  volatile const char* __from = reinterpret_cast<volatile const char*>(&__val);
659
  while (__to != __end)
660
    *__to++ = *__from++;
661
}
662
663
#endif
664
665
#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)
666
667
template <typename _Tp>
668
struct __cxx_atomic_base_impl {
669
670
  _LIBCPP_INLINE_VISIBILITY
671
#ifndef _LIBCPP_CXX03_LANG
672
    __cxx_atomic_base_impl() _NOEXCEPT = default;
673
#else
674
    __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {}
675
#endif // _LIBCPP_CXX03_LANG
676
  _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp value) _NOEXCEPT
677
    : __a_value(value) {}
678
  _Tp __a_value;
679
};
680
681
_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR int __to_gcc_order(memory_order __order) {
682
  // Avoid switch statement to make this a constexpr.
683
  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
684
         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
685
          (__order == memory_order_release ? __ATOMIC_RELEASE:
686
           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
687
            (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL:
688
              __ATOMIC_CONSUME))));
689
}
690
691
_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR int __to_gcc_failure_order(memory_order __order) {
692
  // Avoid switch statement to make this a constexpr.
693
  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
694
         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
695
          (__order == memory_order_release ? __ATOMIC_RELAXED:
696
           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
697
            (__order == memory_order_acq_rel ? __ATOMIC_ACQUIRE:
698
              __ATOMIC_CONSUME))));
699
}
700
701
template <typename _Tp>
702
_LIBCPP_INLINE_VISIBILITY
703
void __cxx_atomic_init(volatile __cxx_atomic_base_impl<_Tp>* __a,  _Tp __val) {
704
  __cxx_atomic_assign_volatile(__a->__a_value, __val);
705
}
706
707
template <typename _Tp>
708
_LIBCPP_INLINE_VISIBILITY
709
void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp>* __a,  _Tp __val) {
710
  __a->__a_value = __val;
711
}
712
713
_LIBCPP_INLINE_VISIBILITY inline
714
void __cxx_atomic_thread_fence(memory_order __order) {
715
  __atomic_thread_fence(__to_gcc_order(__order));
716
}
717
718
_LIBCPP_INLINE_VISIBILITY inline
719
void __cxx_atomic_signal_fence(memory_order __order) {
720
  __atomic_signal_fence(__to_gcc_order(__order));
721
}
722
723
template <typename _Tp>
724
_LIBCPP_INLINE_VISIBILITY
725
void __cxx_atomic_store(volatile __cxx_atomic_base_impl<_Tp>* __a,  _Tp __val,
726
                        memory_order __order) {
727
  __atomic_store(&__a->__a_value, &__val,
728
                 __to_gcc_order(__order));
729
}
730
731
template <typename _Tp>
732
_LIBCPP_INLINE_VISIBILITY
733
void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp>* __a,  _Tp __val,
734
                        memory_order __order) {
735
  __atomic_store(&__a->__a_value, &__val,
736
                 __to_gcc_order(__order));
737
}
738
739
template <typename _Tp>
740
_LIBCPP_INLINE_VISIBILITY
741
_Tp __cxx_atomic_load(const volatile __cxx_atomic_base_impl<_Tp>* __a,
742
                      memory_order __order) {
743
  _Tp __ret;
744
  __atomic_load(&__a->__a_value, &__ret,
745
                __to_gcc_order(__order));
746
  return __ret;
747
}
748
749
template <typename _Tp>
750
_LIBCPP_INLINE_VISIBILITY
751
_Tp __cxx_atomic_load(const __cxx_atomic_base_impl<_Tp>* __a, memory_order __order) {
752
  _Tp __ret;
753
  __atomic_load(&__a->__a_value, &__ret,
754
                __to_gcc_order(__order));
755
  return __ret;
756
}
757
758
template <typename _Tp>
759
_LIBCPP_INLINE_VISIBILITY
760
_Tp __cxx_atomic_exchange(volatile __cxx_atomic_base_impl<_Tp>* __a,
761
                          _Tp __value, memory_order __order) {
762
  _Tp __ret;
763
  __atomic_exchange(&__a->__a_value, &__value, &__ret,
764
                    __to_gcc_order(__order));
765
  return __ret;
766
}
767
768
template <typename _Tp>
769
_LIBCPP_INLINE_VISIBILITY
770
_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp>* __a, _Tp __value,
771
                          memory_order __order) {
772
  _Tp __ret;
773
  __atomic_exchange(&__a->__a_value, &__value, &__ret,
774
                    __to_gcc_order(__order));
775
  return __ret;
776
}
777
778
template <typename _Tp>
779
_LIBCPP_INLINE_VISIBILITY
780
bool __cxx_atomic_compare_exchange_strong(
781
    volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value,
782
    memory_order __success, memory_order __failure) {
783
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
784
                                   false,
785
                                   __to_gcc_order(__success),
786
                                   __to_gcc_failure_order(__failure));
787
}
788
789
template <typename _Tp>
790
_LIBCPP_INLINE_VISIBILITY
791
bool __cxx_atomic_compare_exchange_strong(
792
    __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success,
793
    memory_order __failure) {
794
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
795
                                   false,
796
                                   __to_gcc_order(__success),
797
                                   __to_gcc_failure_order(__failure));
798
}
799
800
template <typename _Tp>
801
_LIBCPP_INLINE_VISIBILITY
802
bool __cxx_atomic_compare_exchange_weak(
803
    volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value,
804
    memory_order __success, memory_order __failure) {
805
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
806
                                   true,
807
                                   __to_gcc_order(__success),
808
                                   __to_gcc_failure_order(__failure));
809
}
810
811
template <typename _Tp>
812
_LIBCPP_INLINE_VISIBILITY
813
bool __cxx_atomic_compare_exchange_weak(
814
    __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success,
815
    memory_order __failure) {
816
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
817
                                   true,
818
                                   __to_gcc_order(__success),
819
                                   __to_gcc_failure_order(__failure));
820
}
821
822
// Scaling factor applied to fetch_add/fetch_sub deltas so that pointer
// atomics advance by whole elements (sizeof(_Tp) bytes) while integral
// atomics advance by 1.
template <typename _Tp>
struct __skip_amt { enum { value = 1 }; };

template <typename _Tp>
struct __skip_amt<_Tp*> { enum { value = sizeof(_Tp) }; };

// FIXME: Haven't figured out what the spec says about using arrays with
// atomic_fetch_add. Force a failure rather than creating bad behavior.
// (No ::value member, so instantiation fails to compile.)
template <typename _Tp>
struct __skip_amt<_Tp[]> { };
template <typename _Tp, int n>
struct __skip_amt<_Tp[n]> { };
834
835
template <typename _Tp, typename _Td>
836
_LIBCPP_INLINE_VISIBILITY
837
_Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_base_impl<_Tp>* __a,
838
                           _Td __delta, memory_order __order) {
839
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
840
                            __to_gcc_order(__order));
841
}
842
843
template <typename _Tp, typename _Td>
844
_LIBCPP_INLINE_VISIBILITY
845
_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta,
846
                           memory_order __order) {
847
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
848
                            __to_gcc_order(__order));
849
}
850
851
template <typename _Tp, typename _Td>
852
_LIBCPP_INLINE_VISIBILITY
853
_Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_base_impl<_Tp>* __a,
854
                           _Td __delta, memory_order __order) {
855
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
856
                            __to_gcc_order(__order));
857
}
858
859
template <typename _Tp, typename _Td>
860
_LIBCPP_INLINE_VISIBILITY
861
_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta,
862
                           memory_order __order) {
863
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
864
                            __to_gcc_order(__order));
865
}
866
867
template <typename _Tp>
868
_LIBCPP_INLINE_VISIBILITY
869
_Tp __cxx_atomic_fetch_and(volatile __cxx_atomic_base_impl<_Tp>* __a,
870
                           _Tp __pattern, memory_order __order) {
871
  return __atomic_fetch_and(&__a->__a_value, __pattern,
872
                            __to_gcc_order(__order));
873
}
874
875
template <typename _Tp>
876
_LIBCPP_INLINE_VISIBILITY
877
_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp>* __a,
878
                           _Tp __pattern, memory_order __order) {
879
  return __atomic_fetch_and(&__a->__a_value, __pattern,
880
                            __to_gcc_order(__order));
881
}
882
883
template <typename _Tp>
884
_LIBCPP_INLINE_VISIBILITY
885
_Tp __cxx_atomic_fetch_or(volatile __cxx_atomic_base_impl<_Tp>* __a,
886
                          _Tp __pattern, memory_order __order) {
887
  return __atomic_fetch_or(&__a->__a_value, __pattern,
888
                           __to_gcc_order(__order));
889
}
890
891
template <typename _Tp>
892
_LIBCPP_INLINE_VISIBILITY
893
_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
894
                          memory_order __order) {
895
  return __atomic_fetch_or(&__a->__a_value, __pattern,
896
                           __to_gcc_order(__order));
897
}
898
899
template <typename _Tp>
900
_LIBCPP_INLINE_VISIBILITY
901
_Tp __cxx_atomic_fetch_xor(volatile __cxx_atomic_base_impl<_Tp>* __a,
902
                           _Tp __pattern, memory_order __order) {
903
  return __atomic_fetch_xor(&__a->__a_value, __pattern,
904
                            __to_gcc_order(__order));
905
}
906
907
template <typename _Tp>
908
_LIBCPP_INLINE_VISIBILITY
909
_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
910
                           memory_order __order) {
911
  return __atomic_fetch_xor(&__a->__a_value, __pattern,
912
                            __to_gcc_order(__order));
913
}
914
915
#define __cxx_atomic_is_lock_free(__s) __atomic_is_lock_free(__s, 0)
916
917
#elif defined(_LIBCPP_HAS_C_ATOMIC_IMP)
918
919
template <typename _Tp>
920
struct __cxx_atomic_base_impl {
921
922
  _LIBCPP_INLINE_VISIBILITY
923
#ifndef _LIBCPP_CXX03_LANG
924
    __cxx_atomic_base_impl() _NOEXCEPT = default;
925
#else
926
    __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {}
927
#endif // _LIBCPP_CXX03_LANG
928
  _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp value) _NOEXCEPT
929
0
    : __a_value(value) {}
930
  _LIBCPP_DISABLE_EXTENSION_WARNING _Atomic(_Tp) __a_value;
931
};
932
933
#define __cxx_atomic_is_lock_free(__s) __c11_atomic_is_lock_free(__s)
934
935
// Fences for the C11 backend; the memory_order enum is narrowed to its
// underlying type before being handed to the builtin.
_LIBCPP_INLINE_VISIBILITY inline
void __cxx_atomic_thread_fence(memory_order __order) _NOEXCEPT {
    __c11_atomic_thread_fence(static_cast<__memory_order_underlying_t>(__order));
}

_LIBCPP_INLINE_VISIBILITY inline
void __cxx_atomic_signal_fence(memory_order __order) _NOEXCEPT {
    __c11_atomic_signal_fence(static_cast<__memory_order_underlying_t>(__order));
}
944
945
template<class _Tp>
946
_LIBCPP_INLINE_VISIBILITY
947
void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val) _NOEXCEPT {
948
    __c11_atomic_init(&__a->__a_value, __val);
949
}
950
template<class _Tp>
951
_LIBCPP_INLINE_VISIBILITY
952
void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val) _NOEXCEPT {
953
    __c11_atomic_init(&__a->__a_value, __val);
954
}
955
956
template<class _Tp>
957
_LIBCPP_INLINE_VISIBILITY
958
0
void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val, memory_order __order) _NOEXCEPT {
959
0
    __c11_atomic_store(&__a->__a_value, __val, static_cast<__memory_order_underlying_t>(__order));
960
0
}
961
template<class _Tp>
962
_LIBCPP_INLINE_VISIBILITY
963
void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val, memory_order __order) _NOEXCEPT {
964
    __c11_atomic_store(&__a->__a_value, __val, static_cast<__memory_order_underlying_t>(__order));
965
}
966
967
template<class _Tp>
968
_LIBCPP_INLINE_VISIBILITY
969
0
_Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const volatile* __a, memory_order __order) _NOEXCEPT {
970
0
    using __ptr_type = typename remove_const<decltype(__a->__a_value)>::type*;
971
0
    return __c11_atomic_load(const_cast<__ptr_type>(&__a->__a_value), static_cast<__memory_order_underlying_t>(__order));
972
0
}
Unexecuted instantiation: bool std::__1::__cxx_atomic_load<bool>(std::__1::__cxx_atomic_base_impl<bool> const volatile*, std::__1::memory_order)
Unexecuted instantiation: long long std::__1::__cxx_atomic_load<long long>(std::__1::__cxx_atomic_base_impl<long long> const volatile*, std::__1::memory_order)
973
template<class _Tp>
974
_LIBCPP_INLINE_VISIBILITY
975
_Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const* __a, memory_order __order) _NOEXCEPT {
976
    using __ptr_type = typename remove_const<decltype(__a->__a_value)>::type*;
977
    return __c11_atomic_load(const_cast<__ptr_type>(&__a->__a_value), static_cast<__memory_order_underlying_t>(__order));
978
}
979
980
template<class _Tp>
981
_LIBCPP_INLINE_VISIBILITY
982
0
_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __value, memory_order __order) _NOEXCEPT {
983
0
    return __c11_atomic_exchange(&__a->__a_value, __value, static_cast<__memory_order_underlying_t>(__order));
984
0
}
985
template<class _Tp>
986
_LIBCPP_INLINE_VISIBILITY
987
0
_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> * __a, _Tp __value, memory_order __order) _NOEXCEPT {
988
0
    return __c11_atomic_exchange(&__a->__a_value, __value, static_cast<__memory_order_underlying_t>(__order));
989
0
}
990
991
template<class _Tp>
992
_LIBCPP_INLINE_VISIBILITY
993
bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
994
    return __c11_atomic_compare_exchange_strong(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__failure));
995
}
996
template<class _Tp>
997
_LIBCPP_INLINE_VISIBILITY
998
0
bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
999
0
    return __c11_atomic_compare_exchange_strong(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__failure));
1000
0
}
1001
1002
template<class _Tp>
1003
_LIBCPP_INLINE_VISIBILITY
1004
bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
1005
    return __c11_atomic_compare_exchange_weak(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__failure));
1006
}
1007
template<class _Tp>
1008
_LIBCPP_INLINE_VISIBILITY
1009
bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
1010
    return __c11_atomic_compare_exchange_weak(&__a->__a_value, __expected, __value,  static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__failure));
1011
}
1012
1013
template<class _Tp>
1014
_LIBCPP_INLINE_VISIBILITY
1015
0
_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
1016
0
    return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1017
0
}
1018
template<class _Tp>
1019
_LIBCPP_INLINE_VISIBILITY
1020
3.39M
_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT {
1021
3.39M
    return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1022
3.39M
}
Unexecuted instantiation: long long std::__1::__cxx_atomic_fetch_add<long long>(std::__1::__cxx_atomic_base_impl<long long>*, long long, std::__1::memory_order)
int std::__1::__cxx_atomic_fetch_add<int>(std::__1::__cxx_atomic_base_impl<int>*, int, std::__1::memory_order)
Line
Count
Source
1020
3.39M
_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT {
1021
3.39M
    return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1022
3.39M
}
1023
1024
template<class _Tp>
1025
_LIBCPP_INLINE_VISIBILITY
1026
_Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
1027
    return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1028
}
1029
template<class _Tp>
1030
_LIBCPP_INLINE_VISIBILITY
1031
_Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
1032
    return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1033
}
1034
1035
template<class _Tp>
1036
_LIBCPP_INLINE_VISIBILITY
1037
0
_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
1038
0
    return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1039
0
}
1040
template<class _Tp>
1041
_LIBCPP_INLINE_VISIBILITY
1042
_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT {
1043
    return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1044
}
1045
template<class _Tp>
1046
_LIBCPP_INLINE_VISIBILITY
1047
_Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
1048
    return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1049
}
1050
template<class _Tp>
1051
_LIBCPP_INLINE_VISIBILITY
1052
_Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
1053
    return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
1054
}
1055
1056
template<class _Tp>
1057
_LIBCPP_INLINE_VISIBILITY
1058
_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1059
    return __c11_atomic_fetch_and(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1060
}
1061
template<class _Tp>
1062
_LIBCPP_INLINE_VISIBILITY
1063
_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1064
    return __c11_atomic_fetch_and(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1065
}
1066
1067
template<class _Tp>
1068
_LIBCPP_INLINE_VISIBILITY
1069
_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1070
    return __c11_atomic_fetch_or(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1071
}
1072
template<class _Tp>
1073
_LIBCPP_INLINE_VISIBILITY
1074
_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1075
    return __c11_atomic_fetch_or(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1076
}
1077
1078
template<class _Tp>
1079
_LIBCPP_INLINE_VISIBILITY
1080
_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1081
    return __c11_atomic_fetch_xor(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1082
}
1083
template<class _Tp>
1084
_LIBCPP_INLINE_VISIBILITY
1085
_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
1086
    return __c11_atomic_fetch_xor(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
1087
}
1088
1089
#endif // _LIBCPP_HAS_GCC_ATOMIC_IMP, _LIBCPP_HAS_C_ATOMIC_IMP
1090
1091
template <class _Tp>
1092
_LIBCPP_INLINE_VISIBILITY
1093
_Tp kill_dependency(_Tp __y) _NOEXCEPT
1094
{
1095
    return __y;
1096
}
1097
1098
// Map the standard ATOMIC_*_LOCK_FREE macros onto whichever set the
// compiler predefines (Clang's __CLANG_* spellings first, else GCC's).
#if defined(__CLANG_ATOMIC_BOOL_LOCK_FREE)
# define ATOMIC_BOOL_LOCK_FREE      __CLANG_ATOMIC_BOOL_LOCK_FREE
# define ATOMIC_CHAR_LOCK_FREE      __CLANG_ATOMIC_CHAR_LOCK_FREE
# define ATOMIC_CHAR16_T_LOCK_FREE  __CLANG_ATOMIC_CHAR16_T_LOCK_FREE
# define ATOMIC_CHAR32_T_LOCK_FREE  __CLANG_ATOMIC_CHAR32_T_LOCK_FREE
# define ATOMIC_WCHAR_T_LOCK_FREE   __CLANG_ATOMIC_WCHAR_T_LOCK_FREE
# define ATOMIC_SHORT_LOCK_FREE     __CLANG_ATOMIC_SHORT_LOCK_FREE
# define ATOMIC_INT_LOCK_FREE       __CLANG_ATOMIC_INT_LOCK_FREE
# define ATOMIC_LONG_LOCK_FREE      __CLANG_ATOMIC_LONG_LOCK_FREE
# define ATOMIC_LLONG_LOCK_FREE     __CLANG_ATOMIC_LLONG_LOCK_FREE
# define ATOMIC_POINTER_LOCK_FREE   __CLANG_ATOMIC_POINTER_LOCK_FREE
#elif defined(__GCC_ATOMIC_BOOL_LOCK_FREE)
# define ATOMIC_BOOL_LOCK_FREE      __GCC_ATOMIC_BOOL_LOCK_FREE
# define ATOMIC_CHAR_LOCK_FREE      __GCC_ATOMIC_CHAR_LOCK_FREE
# define ATOMIC_CHAR16_T_LOCK_FREE  __GCC_ATOMIC_CHAR16_T_LOCK_FREE
# define ATOMIC_CHAR32_T_LOCK_FREE  __GCC_ATOMIC_CHAR32_T_LOCK_FREE
# define ATOMIC_WCHAR_T_LOCK_FREE   __GCC_ATOMIC_WCHAR_T_LOCK_FREE
# define ATOMIC_SHORT_LOCK_FREE     __GCC_ATOMIC_SHORT_LOCK_FREE
# define ATOMIC_INT_LOCK_FREE       __GCC_ATOMIC_INT_LOCK_FREE
# define ATOMIC_LONG_LOCK_FREE      __GCC_ATOMIC_LONG_LOCK_FREE
# define ATOMIC_LLONG_LOCK_FREE     __GCC_ATOMIC_LLONG_LOCK_FREE
# define ATOMIC_POINTER_LOCK_FREE   __GCC_ATOMIC_POINTER_LOCK_FREE
#endif
1121
1122
#ifdef _LIBCPP_ATOMIC_ONLY_USE_BUILTINS
1123
1124
template<typename _Tp>
1125
struct __cxx_atomic_lock_impl {
1126
1127
  _LIBCPP_INLINE_VISIBILITY
1128
  __cxx_atomic_lock_impl() _NOEXCEPT
1129
    : __a_value(), __a_lock(0) {}
1130
  _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR explicit
1131
  __cxx_atomic_lock_impl(_Tp value) _NOEXCEPT
1132
    : __a_value(value), __a_lock(0) {}
1133
1134
  _Tp __a_value;
1135
  mutable __cxx_atomic_base_impl<_LIBCPP_ATOMIC_FLAG_TYPE> __a_lock;
1136
1137
  _LIBCPP_INLINE_VISIBILITY void __lock() const volatile {
1138
    while(1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire))
1139
        /*spin*/;
1140
  }
1141
  _LIBCPP_INLINE_VISIBILITY void __lock() const {
1142
    while(1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire))
1143
        /*spin*/;
1144
  }
1145
  _LIBCPP_INLINE_VISIBILITY void __unlock() const volatile {
1146
    __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release);
1147
  }
1148
  _LIBCPP_INLINE_VISIBILITY void __unlock() const {
1149
    __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release);
1150
  }
1151
  _LIBCPP_INLINE_VISIBILITY _Tp __read() const volatile {
1152
    __lock();
1153
    _Tp __old;
1154
    __cxx_atomic_assign_volatile(__old, __a_value);
1155
    __unlock();
1156
    return __old;
1157
  }
1158
  _LIBCPP_INLINE_VISIBILITY _Tp __read() const {
1159
    __lock();
1160
    _Tp __old = __a_value;
1161
    __unlock();
1162
    return __old;
1163
  }
1164
};
1165
1166
template <typename _Tp>
1167
_LIBCPP_INLINE_VISIBILITY
1168
void __cxx_atomic_init(volatile __cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val) {
1169
  __cxx_atomic_assign_volatile(__a->__a_value, __val);
1170
}
1171
template <typename _Tp>
1172
_LIBCPP_INLINE_VISIBILITY
1173
void __cxx_atomic_init(__cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val) {
1174
  __a->__a_value = __val;
1175
}
1176
1177
template <typename _Tp>
1178
_LIBCPP_INLINE_VISIBILITY
1179
void __cxx_atomic_store(volatile __cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val, memory_order) {
1180
  __a->__lock();
1181
  __cxx_atomic_assign_volatile(__a->__a_value, __val);
1182
  __a->__unlock();
1183
}
1184
template <typename _Tp>
1185
_LIBCPP_INLINE_VISIBILITY
1186
void __cxx_atomic_store(__cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val, memory_order) {
1187
  __a->__lock();
1188
  __a->__a_value = __val;
1189
  __a->__unlock();
1190
}
1191
1192
template <typename _Tp>
1193
_LIBCPP_INLINE_VISIBILITY
1194
_Tp __cxx_atomic_load(const volatile __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
1195
  return __a->__read();
1196
}
1197
template <typename _Tp>
1198
_LIBCPP_INLINE_VISIBILITY
1199
_Tp __cxx_atomic_load(const __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
1200
  return __a->__read();
1201
}
1202
1203
template <typename _Tp>
1204
_LIBCPP_INLINE_VISIBILITY
1205
_Tp __cxx_atomic_exchange(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
1206
  __a->__lock();
1207
  _Tp __old;
1208
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1209
  __cxx_atomic_assign_volatile(__a->__a_value, __value);
1210
  __a->__unlock();
1211
  return __old;
1212
}
1213
template <typename _Tp>
1214
_LIBCPP_INLINE_VISIBILITY
1215
_Tp __cxx_atomic_exchange(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
1216
  __a->__lock();
1217
  _Tp __old = __a->__a_value;
1218
  __a->__a_value = __value;
1219
  __a->__unlock();
1220
  return __old;
1221
}
1222
1223
template <typename _Tp>
1224
_LIBCPP_INLINE_VISIBILITY
1225
bool __cxx_atomic_compare_exchange_strong(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1226
                                          _Tp* __expected, _Tp __value, memory_order, memory_order) {
1227
  __a->__lock();
1228
  _Tp __temp;
1229
  __cxx_atomic_assign_volatile(__temp, __a->__a_value);
1230
  bool __ret = __temp == *__expected;
1231
  if(__ret)
1232
    __cxx_atomic_assign_volatile(__a->__a_value, __value);
1233
  else
1234
    __cxx_atomic_assign_volatile(*__expected, __a->__a_value);
1235
  __a->__unlock();
1236
  return __ret;
1237
}
1238
template <typename _Tp>
1239
_LIBCPP_INLINE_VISIBILITY
1240
bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_lock_impl<_Tp>* __a,
1241
                                          _Tp* __expected, _Tp __value, memory_order, memory_order) {
1242
  __a->__lock();
1243
  bool __ret = __a->__a_value == *__expected;
1244
  if(__ret)
1245
    __a->__a_value = __value;
1246
  else
1247
    *__expected = __a->__a_value;
1248
  __a->__unlock();
1249
  return __ret;
1250
}
1251
1252
template <typename _Tp>
1253
_LIBCPP_INLINE_VISIBILITY
1254
bool __cxx_atomic_compare_exchange_weak(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1255
                                        _Tp* __expected, _Tp __value, memory_order, memory_order) {
1256
  __a->__lock();
1257
  _Tp __temp;
1258
  __cxx_atomic_assign_volatile(__temp, __a->__a_value);
1259
  bool __ret = __temp == *__expected;
1260
  if(__ret)
1261
    __cxx_atomic_assign_volatile(__a->__a_value, __value);
1262
  else
1263
    __cxx_atomic_assign_volatile(*__expected, __a->__a_value);
1264
  __a->__unlock();
1265
  return __ret;
1266
}
1267
template <typename _Tp>
1268
_LIBCPP_INLINE_VISIBILITY
1269
bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_lock_impl<_Tp>* __a,
1270
                                        _Tp* __expected, _Tp __value, memory_order, memory_order) {
1271
  __a->__lock();
1272
  bool __ret = __a->__a_value == *__expected;
1273
  if(__ret)
1274
    __a->__a_value = __value;
1275
  else
1276
    *__expected = __a->__a_value;
1277
  __a->__unlock();
1278
  return __ret;
1279
}
1280
1281
template <typename _Tp, typename _Td>
1282
_LIBCPP_INLINE_VISIBILITY
1283
_Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1284
                           _Td __delta, memory_order) {
1285
  __a->__lock();
1286
  _Tp __old;
1287
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1288
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old + __delta));
1289
  __a->__unlock();
1290
  return __old;
1291
}
1292
template <typename _Tp, typename _Td>
1293
_LIBCPP_INLINE_VISIBILITY
1294
_Tp __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp>* __a,
1295
                           _Td __delta, memory_order) {
1296
  __a->__lock();
1297
  _Tp __old = __a->__a_value;
1298
  __a->__a_value += __delta;
1299
  __a->__unlock();
1300
  return __old;
1301
}
1302
1303
template <typename _Tp, typename _Td>
1304
_LIBCPP_INLINE_VISIBILITY
1305
_Tp* __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp*>* __a,
1306
                           ptrdiff_t __delta, memory_order) {
1307
  __a->__lock();
1308
  _Tp* __old;
1309
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1310
  __cxx_atomic_assign_volatile(__a->__a_value, __old + __delta);
1311
  __a->__unlock();
1312
  return __old;
1313
}
1314
template <typename _Tp, typename _Td>
1315
_LIBCPP_INLINE_VISIBILITY
1316
_Tp* __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp*>* __a,
1317
                           ptrdiff_t __delta, memory_order) {
1318
  __a->__lock();
1319
  _Tp* __old = __a->__a_value;
1320
  __a->__a_value += __delta;
1321
  __a->__unlock();
1322
  return __old;
1323
}
1324
1325
template <typename _Tp, typename _Td>
1326
_LIBCPP_INLINE_VISIBILITY
1327
_Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1328
                           _Td __delta, memory_order) {
1329
  __a->__lock();
1330
  _Tp __old;
1331
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1332
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old - __delta));
1333
  __a->__unlock();
1334
  return __old;
1335
}
1336
template <typename _Tp, typename _Td>
1337
_LIBCPP_INLINE_VISIBILITY
1338
_Tp __cxx_atomic_fetch_sub(__cxx_atomic_lock_impl<_Tp>* __a,
1339
                           _Td __delta, memory_order) {
1340
  __a->__lock();
1341
  _Tp __old = __a->__a_value;
1342
  __a->__a_value -= __delta;
1343
  __a->__unlock();
1344
  return __old;
1345
}
1346
1347
template <typename _Tp>
1348
_LIBCPP_INLINE_VISIBILITY
1349
_Tp __cxx_atomic_fetch_and(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1350
                           _Tp __pattern, memory_order) {
1351
  __a->__lock();
1352
  _Tp __old;
1353
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1354
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old & __pattern));
1355
  __a->__unlock();
1356
  return __old;
1357
}
1358
template <typename _Tp>
1359
_LIBCPP_INLINE_VISIBILITY
1360
_Tp __cxx_atomic_fetch_and(__cxx_atomic_lock_impl<_Tp>* __a,
1361
                           _Tp __pattern, memory_order) {
1362
  __a->__lock();
1363
  _Tp __old = __a->__a_value;
1364
  __a->__a_value &= __pattern;
1365
  __a->__unlock();
1366
  return __old;
1367
}
1368
1369
template <typename _Tp>
1370
_LIBCPP_INLINE_VISIBILITY
1371
_Tp __cxx_atomic_fetch_or(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1372
                          _Tp __pattern, memory_order) {
1373
  __a->__lock();
1374
  _Tp __old;
1375
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1376
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old | __pattern));
1377
  __a->__unlock();
1378
  return __old;
1379
}
1380
template <typename _Tp>
1381
_LIBCPP_INLINE_VISIBILITY
1382
_Tp __cxx_atomic_fetch_or(__cxx_atomic_lock_impl<_Tp>* __a,
1383
                          _Tp __pattern, memory_order) {
1384
  __a->__lock();
1385
  _Tp __old = __a->__a_value;
1386
  __a->__a_value |= __pattern;
1387
  __a->__unlock();
1388
  return __old;
1389
}
1390
1391
template <typename _Tp>
1392
_LIBCPP_INLINE_VISIBILITY
1393
_Tp __cxx_atomic_fetch_xor(volatile __cxx_atomic_lock_impl<_Tp>* __a,
1394
                           _Tp __pattern, memory_order) {
1395
  __a->__lock();
1396
  _Tp __old;
1397
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1398
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old ^ __pattern));
1399
  __a->__unlock();
1400
  return __old;
1401
}
1402
template <typename _Tp>
1403
_LIBCPP_INLINE_VISIBILITY
1404
_Tp __cxx_atomic_fetch_xor(__cxx_atomic_lock_impl<_Tp>* __a,
1405
                           _Tp __pattern, memory_order) {
1406
  __a->__lock();
1407
  _Tp __old = __a->__a_value;
1408
  __a->__a_value ^= __pattern;
1409
  __a->__unlock();
1410
  return __old;
1411
}
1412
1413
#ifdef __cpp_lib_atomic_is_always_lock_free
1414
1415
template<typename _Tp> struct __cxx_is_always_lock_free {
1416
    enum { __value = __atomic_always_lock_free(sizeof(_Tp), 0) }; };
1417
1418
#else
1419
1420
template<typename _Tp> struct __cxx_is_always_lock_free { enum { __value = false }; };
1421
// Implementations must match the C ATOMIC_*_LOCK_FREE macro values.
1422
template<> struct __cxx_is_always_lock_free<bool> { enum { __value = 2 == ATOMIC_BOOL_LOCK_FREE }; };
1423
template<> struct __cxx_is_always_lock_free<char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
1424
template<> struct __cxx_is_always_lock_free<signed char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
1425
template<> struct __cxx_is_always_lock_free<unsigned char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
1426
template<> struct __cxx_is_always_lock_free<char16_t> { enum { __value = 2 == ATOMIC_CHAR16_T_LOCK_FREE }; };
1427
template<> struct __cxx_is_always_lock_free<char32_t> { enum { __value = 2 == ATOMIC_CHAR32_T_LOCK_FREE }; };
1428
template<> struct __cxx_is_always_lock_free<wchar_t> { enum { __value = 2 == ATOMIC_WCHAR_T_LOCK_FREE }; };
1429
template<> struct __cxx_is_always_lock_free<short> { enum { __value = 2 == ATOMIC_SHORT_LOCK_FREE }; };
1430
template<> struct __cxx_is_always_lock_free<unsigned short> { enum { __value = 2 == ATOMIC_SHORT_LOCK_FREE }; };
1431
template<> struct __cxx_is_always_lock_free<int> { enum { __value = 2 == ATOMIC_INT_LOCK_FREE }; };
1432
template<> struct __cxx_is_always_lock_free<unsigned int> { enum { __value = 2 == ATOMIC_INT_LOCK_FREE }; };
1433
template<> struct __cxx_is_always_lock_free<long> { enum { __value = 2 == ATOMIC_LONG_LOCK_FREE }; };
1434
template<> struct __cxx_is_always_lock_free<unsigned long> { enum { __value = 2 == ATOMIC_LONG_LOCK_FREE }; };
1435
template<> struct __cxx_is_always_lock_free<long long> { enum { __value = 2 == ATOMIC_LLONG_LOCK_FREE }; };
1436
template<> struct __cxx_is_always_lock_free<unsigned long long> { enum { __value = 2 == ATOMIC_LLONG_LOCK_FREE }; };
1437
template<typename _Tp> struct __cxx_is_always_lock_free<_Tp*> { enum { __value = 2 == ATOMIC_POINTER_LOCK_FREE }; };
1438
template<> struct __cxx_is_always_lock_free<std::nullptr_t> { enum { __value = 2 == ATOMIC_POINTER_LOCK_FREE }; };
1439
1440
#endif //__cpp_lib_atomic_is_always_lock_free
1441
1442
template <typename _Tp,
1443
          typename _Base = typename conditional<__cxx_is_always_lock_free<_Tp>::__value,
1444
                                                __cxx_atomic_base_impl<_Tp>,
1445
                                                __cxx_atomic_lock_impl<_Tp> >::type>
1446
#else
1447
template <typename _Tp,
1448
          typename _Base = __cxx_atomic_base_impl<_Tp> >
1449
#endif //_LIBCPP_ATOMIC_ONLY_USE_BUILTINS
1450
struct __cxx_atomic_impl : public _Base {
1451
1452
#if _GNUC_VER >= 501
1453
    static_assert(is_trivially_copyable<_Tp>::value,
1454
      "std::atomic<Tp> requires that 'Tp' be a trivially copyable type");
1455
#endif
1456
1457
  _LIBCPP_INLINE_VISIBILITY __cxx_atomic_impl() _NOEXCEPT _LIBCPP_DEFAULT
1458
  _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR explicit __cxx_atomic_impl(_Tp value) _NOEXCEPT
1459
0
    : _Base(value) {}
1460
};
1461
1462
// Platform word used by the contention (wait/notify) machinery: 32-bit on
// Linux — presumably to match the kernel futex word size (TODO confirm) —
// and 64-bit elsewhere.
#ifdef __linux__
1463
    using __cxx_contention_t = int32_t;
1464
#else
1465
    using __cxx_contention_t = int64_t;
1466
#endif //__linux__
1467
1468
#if _LIBCPP_STD_VER >= 11
1469
1470
using __cxx_atomic_contention_t = __cxx_atomic_impl<__cxx_contention_t>;
1471
1472
#ifndef _LIBCPP_HAS_NO_PLATFORM_WAIT
1473
1474
// Out-of-line wait/notify entry points implemented in the libc++ dylib
// (exported from the ABI).  The void* overloads take an arbitrary atomic
// object address; the __cxx_atomic_contention_t* overloads take the
// contention word directly.
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_one(void const volatile*);
1475
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_all(void const volatile*);
1476
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t __libcpp_atomic_monitor(void const volatile*);
1477
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __libcpp_atomic_wait(void const volatile*, __cxx_contention_t);
1478
1479
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_one(__cxx_atomic_contention_t const volatile*);
1480
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_all(__cxx_atomic_contention_t const volatile*);
1481
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t __libcpp_atomic_monitor(__cxx_atomic_contention_t const volatile*);
1482
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __libcpp_atomic_wait(__cxx_atomic_contention_t const volatile*, __cxx_contention_t);
1483
1484
// Backoff functor handed to __libcpp_thread_poll_with_backoff.  Escalates by
// elapsed time: <= 4us pure spin, <= 64us yield the thread, beyond that park
// the thread in the platform wait.  Returns true only when the predicate
// succeeded on the slow path; false means "keep polling".
template <class _Atp, class _Fn>
1485
struct __libcpp_atomic_wait_backoff_impl {
1486
    _Atp* __a;
1487
    _Fn __test_fn;
1488
    _LIBCPP_INLINE_VISIBILITY bool operator()(chrono::nanoseconds __elapsed) const
1489
0
    {
1490
0
        if(__elapsed > chrono::microseconds(64))
1491
0
        {
1492
0
            // Sample the monitor, then re-test before blocking so a change
            // between the test and the wait is not missed.
            // NOTE(review): relies on the __libcpp_atomic_monitor /
            // __libcpp_atomic_wait contract implemented in the dylib.
            auto const __monitor = __libcpp_atomic_monitor(__a);
1493
0
            if(__test_fn())
1494
0
                return true;
1495
0
            __libcpp_atomic_wait(__a, __monitor);
1496
0
        }
1497
0
        else if(__elapsed > chrono::microseconds(4))
1498
0
            __libcpp_thread_yield();
1499
0
        else
1500
0
            ; // poll
1501
0
        return false;
1502
0
    }
Unexecuted instantiation: std::__1::__libcpp_atomic_wait_backoff_impl<std::__1::__cxx_atomic_impl<bool, std::__1::__cxx_atomic_base_impl<bool> > const volatile, std::__1::__cxx_atomic_wait_test_fn_impl<std::__1::__cxx_atomic_impl<bool, std::__1::__cxx_atomic_base_impl<bool> > const volatile, bool> >::operator()(std::__1::chrono::duration<long long, std::__1::ratio<1l, 1000000000l> >) const
Unexecuted instantiation: std::__1::__libcpp_atomic_wait_backoff_impl<std::__1::__cxx_atomic_impl<bool, std::__1::__cxx_atomic_base_impl<bool> > const, std::__1::__cxx_atomic_wait_test_fn_impl<std::__1::__cxx_atomic_impl<bool, std::__1::__cxx_atomic_base_impl<bool> > const, bool> >::operator()(std::__1::chrono::duration<long long, std::__1::ratio<1l, 1000000000l> >) const
1503
};
1504
1505
// Predicate-form wait: polls __test_fn with the escalating backoff above
// until it returns true.
template <class _Atp, class _Fn>
1506
_LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp* __a, _Fn && __test_fn)
1507
0
{
1508
0
    __libcpp_atomic_wait_backoff_impl<_Atp, typename decay<_Fn>::type> __backoff_fn = {__a, __test_fn};
1509
0
    return __libcpp_thread_poll_with_backoff(__test_fn, __backoff_fn);
1510
0
}
Unexecuted instantiation: bool std::__1::__cxx_atomic_wait<std::__1::__cxx_atomic_impl<bool, std::__1::__cxx_atomic_base_impl<bool> > const volatile, std::__1::__cxx_atomic_wait_test_fn_impl<std::__1::__cxx_atomic_impl<bool, std::__1::__cxx_atomic_base_impl<bool> > const volatile, bool>&>(std::__1::__cxx_atomic_impl<bool, std::__1::__cxx_atomic_base_impl<bool> > const volatile*, std::__1::__cxx_atomic_wait_test_fn_impl<std::__1::__cxx_atomic_impl<bool, std::__1::__cxx_atomic_base_impl<bool> > const volatile, bool>&&&)
Unexecuted instantiation: bool std::__1::__cxx_atomic_wait<std::__1::__cxx_atomic_impl<bool, std::__1::__cxx_atomic_base_impl<bool> > const, std::__1::__cxx_atomic_wait_test_fn_impl<std::__1::__cxx_atomic_impl<bool, std::__1::__cxx_atomic_base_impl<bool> > const, bool>&>(std::__1::__cxx_atomic_impl<bool, std::__1::__cxx_atomic_base_impl<bool> > const*, std::__1::__cxx_atomic_wait_test_fn_impl<std::__1::__cxx_atomic_impl<bool, std::__1::__cxx_atomic_base_impl<bool> > const, bool>&&&)
1511
1512
#else // _LIBCPP_HAS_NO_PLATFORM_WAIT
1513
1514
// No platform wait available: notify is a no-op and waiting degrades to
// polling with the generic timed backoff policy.
template <class _Tp>
1515
_LIBCPP_INLINE_VISIBILITY void __cxx_atomic_notify_all(__cxx_atomic_impl<_Tp> const volatile*) { }
1516
template <class _Tp>
1517
_LIBCPP_INLINE_VISIBILITY void __cxx_atomic_notify_one(__cxx_atomic_impl<_Tp> const volatile*) { }
1518
template <class _Atp, class _Fn>
1519
_LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp*, _Fn && __test_fn)
1520
{
1521
    return __libcpp_thread_poll_with_backoff(__test_fn, __libcpp_timed_backoff_policy());
1522
}
1523
1524
#endif // _LIBCPP_HAS_NO_PLATFORM_WAIT
1525
1526
// "Value changed" predicate used by the value-form wait: true once an atomic
// load (at __order) no longer compares bitwise-equal to the expected __val.
template <class _Atp, class _Tp>
1527
struct __cxx_atomic_wait_test_fn_impl {
1528
    _Atp* __a;
1529
    _Tp __val;
1530
    memory_order __order;
1531
    _LIBCPP_INLINE_VISIBILITY bool operator()() const
1532
0
    {
1533
0
        return !__cxx_nonatomic_compare_equal(__cxx_atomic_load(__a, __order), __val);
1534
0
    }
Unexecuted instantiation: std::__1::__cxx_atomic_wait_test_fn_impl<std::__1::__cxx_atomic_impl<bool, std::__1::__cxx_atomic_base_impl<bool> > const volatile, bool>::operator()() const
Unexecuted instantiation: std::__1::__cxx_atomic_wait_test_fn_impl<std::__1::__cxx_atomic_impl<bool, std::__1::__cxx_atomic_base_impl<bool> > const, bool>::operator()() const
1535
};
1536
1537
// Value-form wait (the C++20 atomic::wait semantics): blocks until the
// stored value differs from __val, by delegating to the predicate-form wait.
template <class _Atp, class _Tp>
1538
_LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp* __a, _Tp const __val, memory_order __order)
1539
0
{
1540
0
    __cxx_atomic_wait_test_fn_impl<_Atp, _Tp> __test_fn = {__a, __val, __order};
1541
0
    return __cxx_atomic_wait(__a, __test_fn);
1542
0
}
Unexecuted instantiation: bool std::__1::__cxx_atomic_wait<std::__1::__cxx_atomic_impl<bool, std::__1::__cxx_atomic_base_impl<bool> > const volatile, bool>(std::__1::__cxx_atomic_impl<bool, std::__1::__cxx_atomic_base_impl<bool> > const volatile*, bool, std::__1::memory_order)
Unexecuted instantiation: bool std::__1::__cxx_atomic_wait<std::__1::__cxx_atomic_impl<bool, std::__1::__cxx_atomic_base_impl<bool> > const, bool>(std::__1::__cxx_atomic_impl<bool, std::__1::__cxx_atomic_base_impl<bool> > const*, bool, std::__1::memory_order)
1543
1544
#endif //_LIBCPP_STD_VER >= 11
1545
1546
// general atomic<T>
1547
1548
// Primary template: the non-integral (and bool) case.  Provides the common
// atomic operations (load/store/exchange/CAS/wait/notify); the second
// template parameter selects the integral specialization below, which adds
// the arithmetic fetch_* operations.  Each operation comes in volatile and
// non-volatile overloads and forwards to the __cxx_atomic_* layer; the
// _LIBCPP_CHECK_*_MEMORY_ORDER lines are debug-mode diagnostics for
// memory orders the standard forbids for that operation.
template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
1549
struct __atomic_base  // false
1550
{
1551
    // mutable: const member functions (load, wait) still operate on it.
    mutable __cxx_atomic_impl<_Tp> __a_;
1552
1553
#if defined(__cpp_lib_atomic_is_always_lock_free)
1554
  // Compile-time answer from the compiler builtin (0 = any alignment).
  static _LIBCPP_CONSTEXPR bool is_always_lock_free = __atomic_always_lock_free(sizeof(__a_), 0);
1555
#endif
1556
1557
    _LIBCPP_INLINE_VISIBILITY
1558
    bool is_lock_free() const volatile _NOEXCEPT
1559
        {return __cxx_atomic_is_lock_free(sizeof(_Tp));}
1560
    _LIBCPP_INLINE_VISIBILITY
1561
    bool is_lock_free() const _NOEXCEPT
1562
        {return static_cast<__atomic_base const volatile*>(this)->is_lock_free();}
1563
    _LIBCPP_INLINE_VISIBILITY
1564
    void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1565
      _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
1566
        {__cxx_atomic_store(&__a_, __d, __m);}
1567
    _LIBCPP_INLINE_VISIBILITY
1568
    void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1569
      _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
1570
        {__cxx_atomic_store(&__a_, __d, __m);}
1571
    _LIBCPP_INLINE_VISIBILITY
1572
    _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
1573
      _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
1574
        {return __cxx_atomic_load(&__a_, __m);}
1575
    _LIBCPP_INLINE_VISIBILITY
1576
    _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
1577
      _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
1578
        {return __cxx_atomic_load(&__a_, __m);}
1579
    _LIBCPP_INLINE_VISIBILITY
1580
    operator _Tp() const volatile _NOEXCEPT {return load();}
1581
    _LIBCPP_INLINE_VISIBILITY
1582
    operator _Tp() const _NOEXCEPT          {return load();}
1583
    _LIBCPP_INLINE_VISIBILITY
1584
    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1585
        {return __cxx_atomic_exchange(&__a_, __d, __m);}
1586
    _LIBCPP_INLINE_VISIBILITY
1587
    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1588
        {return __cxx_atomic_exchange(&__a_, __d, __m);}
1589
    // Four-argument CAS overloads: separate success (__s) / failure (__f)
    // orders.
    _LIBCPP_INLINE_VISIBILITY
1590
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
1591
                               memory_order __s, memory_order __f) volatile _NOEXCEPT
1592
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
1593
        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
1594
    _LIBCPP_INLINE_VISIBILITY
1595
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
1596
                               memory_order __s, memory_order __f) _NOEXCEPT
1597
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
1598
        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
1599
    _LIBCPP_INLINE_VISIBILITY
1600
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
1601
                                 memory_order __s, memory_order __f) volatile _NOEXCEPT
1602
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
1603
        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
1604
    _LIBCPP_INLINE_VISIBILITY
1605
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
1606
                                 memory_order __s, memory_order __f) _NOEXCEPT
1607
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
1608
        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
1609
    // Single-order CAS overloads: the one order is used for both success and
    // failure.
    _LIBCPP_INLINE_VISIBILITY
1610
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
1611
                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1612
        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
1613
    _LIBCPP_INLINE_VISIBILITY
1614
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
1615
                               memory_order __m = memory_order_seq_cst) _NOEXCEPT
1616
        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
1617
    _LIBCPP_INLINE_VISIBILITY
1618
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
1619
                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1620
        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
1621
    _LIBCPP_INLINE_VISIBILITY
1622
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
1623
                                 memory_order __m = memory_order_seq_cst) _NOEXCEPT
1624
0
        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
1625
1626
    // C++20 wait/notify surface, forwarding to the __cxx_atomic_* machinery
    // defined earlier in this header.
    _LIBCPP_INLINE_VISIBILITY void wait(_Tp __v, memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
1627
        {__cxx_atomic_wait(&__a_, __v, __m);}
1628
    _LIBCPP_INLINE_VISIBILITY void wait(_Tp __v, memory_order __m = memory_order_seq_cst) const _NOEXCEPT
1629
        {__cxx_atomic_wait(&__a_, __v, __m);}
1630
    _LIBCPP_INLINE_VISIBILITY void notify_one() volatile _NOEXCEPT
1631
        {__cxx_atomic_notify_one(&__a_);}
1632
    _LIBCPP_INLINE_VISIBILITY void notify_one() _NOEXCEPT
1633
        {__cxx_atomic_notify_one(&__a_);}
1634
    _LIBCPP_INLINE_VISIBILITY void notify_all() volatile _NOEXCEPT
1635
        {__cxx_atomic_notify_all(&__a_);}
1636
    _LIBCPP_INLINE_VISIBILITY void notify_all() _NOEXCEPT
1637
        {__cxx_atomic_notify_all(&__a_);}
1638
1639
    _LIBCPP_INLINE_VISIBILITY
1640
    __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
1641
1642
    _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR
1643
0
    __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}
1644
1645
// Non-copyable: deleted in C++11 and later, private-and-undefined in C++03.
#ifndef _LIBCPP_CXX03_LANG
1646
    __atomic_base(const __atomic_base&) = delete;
1647
    __atomic_base& operator=(const __atomic_base&) = delete;
1648
    __atomic_base& operator=(const __atomic_base&) volatile = delete;
1649
#else
1650
private:
1651
    _LIBCPP_INLINE_VISIBILITY
1652
    __atomic_base(const __atomic_base&);
1653
    _LIBCPP_INLINE_VISIBILITY
1654
    __atomic_base& operator=(const __atomic_base&);
1655
    _LIBCPP_INLINE_VISIBILITY
1656
    __atomic_base& operator=(const __atomic_base&) volatile;
1657
#endif
1658
};
1659
1660
#if defined(__cpp_lib_atomic_is_always_lock_free)
1661
// Out-of-class definition of the static data member (required pre-C++17
// inline variables).
template <class _Tp, bool __b>
1662
_LIBCPP_CONSTEXPR bool __atomic_base<_Tp, __b>::is_always_lock_free;
1663
#endif
1664
1665
// atomic<Integral>
1666
1667
// Specialization for non-bool integral types: layers the arithmetic and
// bitwise fetch_* operations plus the corresponding compound-assignment and
// increment/decrement operators on top of the common base.  Note the
// operators return the *post-operation* value, while fetch_* return the
// *prior* value, per the standard.
template <class _Tp>
1668
struct __atomic_base<_Tp, true>
1669
    : public __atomic_base<_Tp, false>
1670
{
1671
    typedef __atomic_base<_Tp, false> __base;
1672
    _LIBCPP_INLINE_VISIBILITY
1673
    __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
1674
    _LIBCPP_INLINE_VISIBILITY
1675
0
    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}
1676
1677
    _LIBCPP_INLINE_VISIBILITY
1678
    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1679
        {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
1680
    _LIBCPP_INLINE_VISIBILITY
1681
    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1682
3.39M
        {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
1683
    _LIBCPP_INLINE_VISIBILITY
1684
    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1685
        {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
1686
    _LIBCPP_INLINE_VISIBILITY
1687
    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1688
        {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
1689
    _LIBCPP_INLINE_VISIBILITY
1690
    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1691
        {return __cxx_atomic_fetch_and(&this->__a_, __op, __m);}
1692
    _LIBCPP_INLINE_VISIBILITY
1693
    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1694
        {return __cxx_atomic_fetch_and(&this->__a_, __op, __m);}
1695
    _LIBCPP_INLINE_VISIBILITY
1696
    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1697
        {return __cxx_atomic_fetch_or(&this->__a_, __op, __m);}
1698
    _LIBCPP_INLINE_VISIBILITY
1699
    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1700
        {return __cxx_atomic_fetch_or(&this->__a_, __op, __m);}
1701
    _LIBCPP_INLINE_VISIBILITY
1702
    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
1703
        {return __cxx_atomic_fetch_xor(&this->__a_, __op, __m);}
1704
    _LIBCPP_INLINE_VISIBILITY
1705
    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1706
        {return __cxx_atomic_fetch_xor(&this->__a_, __op, __m);}
1707
1708
    _LIBCPP_INLINE_VISIBILITY
1709
    _Tp operator++(int) volatile _NOEXCEPT      {return fetch_add(_Tp(1));}
1710
    _LIBCPP_INLINE_VISIBILITY
1711
2
    _Tp operator++(int) _NOEXCEPT               {return fetch_add(_Tp(1));}
1712
    _LIBCPP_INLINE_VISIBILITY
1713
    _Tp operator--(int) volatile _NOEXCEPT      {return fetch_sub(_Tp(1));}
1714
    _LIBCPP_INLINE_VISIBILITY
1715
    _Tp operator--(int) _NOEXCEPT               {return fetch_sub(_Tp(1));}
1716
    _LIBCPP_INLINE_VISIBILITY
1717
    _Tp operator++() volatile _NOEXCEPT         {return fetch_add(_Tp(1)) + _Tp(1);}
1718
    _LIBCPP_INLINE_VISIBILITY
1719
    _Tp operator++() _NOEXCEPT                  {return fetch_add(_Tp(1)) + _Tp(1);}
1720
    _LIBCPP_INLINE_VISIBILITY
1721
    _Tp operator--() volatile _NOEXCEPT         {return fetch_sub(_Tp(1)) - _Tp(1);}
1722
    _LIBCPP_INLINE_VISIBILITY
1723
    _Tp operator--() _NOEXCEPT                  {return fetch_sub(_Tp(1)) - _Tp(1);}
1724
    _LIBCPP_INLINE_VISIBILITY
1725
    _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
1726
    _LIBCPP_INLINE_VISIBILITY
1727
    _Tp operator+=(_Tp __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
1728
    _LIBCPP_INLINE_VISIBILITY
1729
    _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
1730
    _LIBCPP_INLINE_VISIBILITY
1731
    _Tp operator-=(_Tp __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
1732
    _LIBCPP_INLINE_VISIBILITY
1733
    _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
1734
    _LIBCPP_INLINE_VISIBILITY
1735
    _Tp operator&=(_Tp __op) _NOEXCEPT          {return fetch_and(__op) & __op;}
1736
    _LIBCPP_INLINE_VISIBILITY
1737
    _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
1738
    _LIBCPP_INLINE_VISIBILITY
1739
    _Tp operator|=(_Tp __op) _NOEXCEPT          {return fetch_or(__op) | __op;}
1740
    _LIBCPP_INLINE_VISIBILITY
1741
    _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
1742
    _LIBCPP_INLINE_VISIBILITY
1743
    _Tp operator^=(_Tp __op) _NOEXCEPT          {return fetch_xor(__op) ^ __op;}
1744
};
1745
1746
// atomic<T>
1747
1748
// Public primary template: inherits everything from __atomic_base (which
// dispatches on integral-ness) and adds only assignment from _Tp, which the
// base deliberately deletes for copy-assignment.
template <class _Tp>
1749
struct atomic
1750
    : public __atomic_base<_Tp>
1751
{
1752
    typedef __atomic_base<_Tp> __base;
1753
    typedef _Tp value_type;
1754
    _LIBCPP_INLINE_VISIBILITY
1755
    atomic() _NOEXCEPT _LIBCPP_DEFAULT
1756
    _LIBCPP_INLINE_VISIBILITY
1757
    _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}
1758
1759
    // operator= returns the stored value (not a reference), per the standard.
    _LIBCPP_INLINE_VISIBILITY
1760
    _Tp operator=(_Tp __d) volatile _NOEXCEPT
1761
        {__base::store(__d); return __d;}
1762
    _LIBCPP_INLINE_VISIBILITY
1763
    _Tp operator=(_Tp __d) _NOEXCEPT
1764
        {__base::store(__d); return __d;}
1765
};
1766
1767
// atomic<T*>
1768
1769
// Partial specialization for pointers: adds pointer arithmetic
// (fetch_add/fetch_sub take a ptrdiff_t element count) and the corresponding
// operators.  fetch_* return the prior pointer; the operators return the
// post-operation pointer.
template <class _Tp>
1770
struct atomic<_Tp*>
1771
    : public __atomic_base<_Tp*>
1772
{
1773
    typedef __atomic_base<_Tp*> __base;
1774
    typedef _Tp* value_type;
1775
    _LIBCPP_INLINE_VISIBILITY
1776
    atomic() _NOEXCEPT _LIBCPP_DEFAULT
1777
    _LIBCPP_INLINE_VISIBILITY
1778
    _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}
1779
1780
    _LIBCPP_INLINE_VISIBILITY
1781
    _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
1782
        {__base::store(__d); return __d;}
1783
    _LIBCPP_INLINE_VISIBILITY
1784
    _Tp* operator=(_Tp* __d) _NOEXCEPT
1785
        {__base::store(__d); return __d;}
1786
1787
    _LIBCPP_INLINE_VISIBILITY
1788
    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
1789
                                                                        volatile _NOEXCEPT
1790
        {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
1791
    _LIBCPP_INLINE_VISIBILITY
1792
    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1793
        {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
1794
    _LIBCPP_INLINE_VISIBILITY
1795
    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
1796
                                                                        volatile _NOEXCEPT
1797
        {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
1798
    _LIBCPP_INLINE_VISIBILITY
1799
    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
1800
        {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
1801
1802
    _LIBCPP_INLINE_VISIBILITY
1803
    _Tp* operator++(int) volatile _NOEXCEPT            {return fetch_add(1);}
1804
    _LIBCPP_INLINE_VISIBILITY
1805
    _Tp* operator++(int) _NOEXCEPT                     {return fetch_add(1);}
1806
    _LIBCPP_INLINE_VISIBILITY
1807
    _Tp* operator--(int) volatile _NOEXCEPT            {return fetch_sub(1);}
1808
    _LIBCPP_INLINE_VISIBILITY
1809
    _Tp* operator--(int) _NOEXCEPT                     {return fetch_sub(1);}
1810
    _LIBCPP_INLINE_VISIBILITY
1811
    _Tp* operator++() volatile _NOEXCEPT               {return fetch_add(1) + 1;}
1812
    _LIBCPP_INLINE_VISIBILITY
1813
    _Tp* operator++() _NOEXCEPT                        {return fetch_add(1) + 1;}
1814
    _LIBCPP_INLINE_VISIBILITY
1815
    _Tp* operator--() volatile _NOEXCEPT               {return fetch_sub(1) - 1;}
1816
    _LIBCPP_INLINE_VISIBILITY
1817
    _Tp* operator--() _NOEXCEPT                        {return fetch_sub(1) - 1;}
1818
    _LIBCPP_INLINE_VISIBILITY
1819
    _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
1820
    _LIBCPP_INLINE_VISIBILITY
1821
    _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
1822
    _LIBCPP_INLINE_VISIBILITY
1823
    _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
1824
    _LIBCPP_INLINE_VISIBILITY
1825
    _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
1826
};
1827
1828
// atomic_is_lock_free
1829
1830
// C-compatible free-function API.  Each function below is a thin wrapper that
// forwards to the corresponding member of atomic<_Tp>; every one comes in a
// volatile and a non-volatile overload, and the *_explicit variants take an
// explicit memory_order.
template <class _Tp>
1831
_LIBCPP_INLINE_VISIBILITY
1832
bool
1833
atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
1834
{
1835
    return __o->is_lock_free();
1836
}
1837
1838
template <class _Tp>
1839
_LIBCPP_INLINE_VISIBILITY
1840
bool
1841
atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
1842
{
1843
    return __o->is_lock_free();
1844
}
1845
1846
// atomic_init
1847
1848
// Non-atomic initialization of an atomic object (bypasses the atomic store
// path via __cxx_atomic_init).
template <class _Tp>
1849
_LIBCPP_INLINE_VISIBILITY
1850
void
1851
atomic_init(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1852
{
1853
    __cxx_atomic_init(&__o->__a_, __d);
1854
}
1855
1856
template <class _Tp>
1857
_LIBCPP_INLINE_VISIBILITY
1858
void
1859
atomic_init(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1860
{
1861
    __cxx_atomic_init(&__o->__a_, __d);
1862
}
1863
1864
// atomic_store
1865
1866
template <class _Tp>
1867
_LIBCPP_INLINE_VISIBILITY
1868
void
1869
atomic_store(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1870
{
1871
    __o->store(__d);
1872
}
1873
1874
template <class _Tp>
1875
_LIBCPP_INLINE_VISIBILITY
1876
void
1877
atomic_store(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1878
{
1879
    __o->store(__d);
1880
}
1881
1882
// atomic_store_explicit
1883
1884
template <class _Tp>
1885
_LIBCPP_INLINE_VISIBILITY
1886
void
1887
atomic_store_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1888
  _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
1889
{
1890
    __o->store(__d, __m);
1891
}
1892
1893
template <class _Tp>
1894
_LIBCPP_INLINE_VISIBILITY
1895
void
1896
atomic_store_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1897
  _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
1898
{
1899
    __o->store(__d, __m);
1900
}
1901
1902
// atomic_load
1903
1904
template <class _Tp>
1905
_LIBCPP_INLINE_VISIBILITY
1906
_Tp
1907
atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
1908
{
1909
    return __o->load();
1910
}
1911
1912
template <class _Tp>
1913
_LIBCPP_INLINE_VISIBILITY
1914
_Tp
1915
atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
1916
{
1917
    return __o->load();
1918
}
1919
1920
// atomic_load_explicit
1921
1922
template <class _Tp>
1923
_LIBCPP_INLINE_VISIBILITY
1924
_Tp
1925
atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1926
  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
1927
{
1928
    return __o->load(__m);
1929
}
1930
1931
template <class _Tp>
1932
_LIBCPP_INLINE_VISIBILITY
1933
_Tp
1934
atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
1935
  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
1936
{
1937
    return __o->load(__m);
1938
}
1939
1940
// atomic_exchange
1941
1942
template <class _Tp>
1943
_LIBCPP_INLINE_VISIBILITY
1944
_Tp
1945
atomic_exchange(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1946
{
1947
    return __o->exchange(__d);
1948
}
1949
1950
template <class _Tp>
1951
_LIBCPP_INLINE_VISIBILITY
1952
_Tp
1953
atomic_exchange(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
1954
{
1955
    return __o->exchange(__d);
1956
}
1957
1958
// atomic_exchange_explicit
1959
1960
template <class _Tp>
1961
_LIBCPP_INLINE_VISIBILITY
1962
_Tp
1963
atomic_exchange_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1964
{
1965
    return __o->exchange(__d, __m);
1966
}
1967
1968
template <class _Tp>
1969
_LIBCPP_INLINE_VISIBILITY
1970
_Tp
1971
atomic_exchange_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
1972
{
1973
    return __o->exchange(__d, __m);
1974
}
1975
1976
// atomic_compare_exchange_weak
1977
1978
// Free-function CAS wrappers.  The expected value is passed by pointer and
// is updated with the observed value on failure (forwarded as *__e to the
// member CAS).  The non-_explicit forms use the member defaults (seq_cst);
// the _explicit forms take separate success (__s) / failure (__f) orders.
template <class _Tp>
1979
_LIBCPP_INLINE_VISIBILITY
1980
bool
1981
atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1982
{
1983
    return __o->compare_exchange_weak(*__e, __d);
1984
}
1985
1986
template <class _Tp>
1987
_LIBCPP_INLINE_VISIBILITY
1988
bool
1989
atomic_compare_exchange_weak(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
1990
{
1991
    return __o->compare_exchange_weak(*__e, __d);
1992
}
1993
1994
// atomic_compare_exchange_strong
1995
1996
template <class _Tp>
1997
_LIBCPP_INLINE_VISIBILITY
1998
bool
1999
atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
2000
{
2001
    return __o->compare_exchange_strong(*__e, __d);
2002
}
2003
2004
template <class _Tp>
2005
_LIBCPP_INLINE_VISIBILITY
2006
bool
2007
atomic_compare_exchange_strong(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
2008
{
2009
    return __o->compare_exchange_strong(*__e, __d);
2010
}
2011
2012
// atomic_compare_exchange_weak_explicit
2013
2014
template <class _Tp>
2015
_LIBCPP_INLINE_VISIBILITY
2016
bool
2017
atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, _Tp* __e,
2018
                                      _Tp __d,
2019
                                      memory_order __s, memory_order __f) _NOEXCEPT
2020
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
2021
{
2022
    return __o->compare_exchange_weak(*__e, __d, __s, __f);
2023
}
2024
2025
template <class _Tp>
2026
_LIBCPP_INLINE_VISIBILITY
2027
bool
2028
atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, _Tp* __e, _Tp __d,
2029
                                      memory_order __s, memory_order __f) _NOEXCEPT
2030
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
2031
{
2032
    return __o->compare_exchange_weak(*__e, __d, __s, __f);
2033
}
2034
2035
// atomic_compare_exchange_strong_explicit
2036
2037
template <class _Tp>
2038
_LIBCPP_INLINE_VISIBILITY
2039
bool
2040
atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
2041
                                        _Tp* __e, _Tp __d,
2042
                                        memory_order __s, memory_order __f) _NOEXCEPT
2043
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
2044
{
2045
    return __o->compare_exchange_strong(*__e, __d, __s, __f);
2046
}
2047
2048
template <class _Tp>
2049
_LIBCPP_INLINE_VISIBILITY
2050
bool
2051
atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, _Tp* __e,
2052
                                        _Tp __d,
2053
                                        memory_order __s, memory_order __f) _NOEXCEPT
2054
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
2055
{
2056
    return __o->compare_exchange_strong(*__e, __d, __s, __f);
2057
}
2058
2059
// atomic_wait
2060
2061
// Free-function wrappers for the C++20 wait/notify members.  Note the wait
// functions are declared void; the `return` of a void expression is valid
// and merely forwards.
template <class _Tp>
2062
_LIBCPP_INLINE_VISIBILITY
2063
void atomic_wait(const volatile atomic<_Tp>* __o,
2064
                 typename atomic<_Tp>::value_type __v) _NOEXCEPT
2065
{
2066
    return __o->wait(__v);
2067
}
2068
2069
template <class _Tp>
2070
_LIBCPP_INLINE_VISIBILITY
2071
void atomic_wait(const atomic<_Tp>* __o,
2072
                 typename atomic<_Tp>::value_type __v) _NOEXCEPT
2073
{
2074
    return __o->wait(__v);
2075
}
2076
2077
// atomic_wait_explicit
2078
2079
template <class _Tp>
2080
_LIBCPP_INLINE_VISIBILITY
2081
void atomic_wait_explicit(const volatile atomic<_Tp>* __o,
2082
                          typename atomic<_Tp>::value_type __v,
2083
                          memory_order __m) _NOEXCEPT
2084
  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
2085
{
2086
    return __o->wait(__v, __m);
2087
}
2088
2089
template <class _Tp>
2090
_LIBCPP_INLINE_VISIBILITY
2091
void atomic_wait_explicit(const atomic<_Tp>* __o,
2092
                          typename atomic<_Tp>::value_type __v,
2093
                          memory_order __m) _NOEXCEPT
2094
  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
2095
{
2096
    return __o->wait(__v, __m);
2097
}
2098
2099
// atomic_notify_one
2100
2101
template <class _Tp>
2102
_LIBCPP_INLINE_VISIBILITY
2103
void atomic_notify_one(volatile atomic<_Tp>* __o) _NOEXCEPT
2104
{
2105
    __o->notify_one();
2106
}
2107
template <class _Tp>
2108
_LIBCPP_INLINE_VISIBILITY
2109
void atomic_notify_one(atomic<_Tp>* __o) _NOEXCEPT
2110
{
2111
    __o->notify_one();
2112
}
2113
2114
// atomic_notify_all
2115
2116
template <class _Tp>
2117
_LIBCPP_INLINE_VISIBILITY
2118
void atomic_notify_all(volatile atomic<_Tp>* __o) _NOEXCEPT
2119
{
2120
    __o->notify_all();
2121
}
2122
template <class _Tp>
2123
_LIBCPP_INLINE_VISIBILITY
2124
void atomic_notify_all(atomic<_Tp>* __o) _NOEXCEPT
2125
{
2126
    __o->notify_all();
2127
}
2128
2129
// atomic_fetch_add
2130
2131
// Free-function fetch_add wrappers.  The enable_if constraint restricts the
// value overloads to non-bool integral types; the atomic<_Tp*> overloads
// take a ptrdiff_t element count instead and need no constraint.
template <class _Tp>
2132
_LIBCPP_INLINE_VISIBILITY
2133
typename enable_if
2134
<
2135
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2136
    _Tp
2137
>::type
2138
atomic_fetch_add(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
2139
{
2140
    return __o->fetch_add(__op);
2141
}
2142
2143
template <class _Tp>
2144
_LIBCPP_INLINE_VISIBILITY
2145
typename enable_if
2146
<
2147
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2148
    _Tp
2149
>::type
2150
atomic_fetch_add(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
2151
{
2152
    return __o->fetch_add(__op);
2153
}
2154
2155
template <class _Tp>
2156
_LIBCPP_INLINE_VISIBILITY
2157
_Tp*
2158
atomic_fetch_add(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
2159
{
2160
    return __o->fetch_add(__op);
2161
}
2162
2163
template <class _Tp>
2164
_LIBCPP_INLINE_VISIBILITY
2165
_Tp*
2166
atomic_fetch_add(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
2167
{
2168
    return __o->fetch_add(__op);
2169
}
2170
2171
// atomic_fetch_add_explicit
2172
2173
template <class _Tp>
2174
_LIBCPP_INLINE_VISIBILITY
2175
typename enable_if
2176
<
2177
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2178
    _Tp
2179
>::type
2180
atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
2181
{
2182
    return __o->fetch_add(__op, __m);
2183
}
2184
2185
template <class _Tp>
2186
_LIBCPP_INLINE_VISIBILITY
2187
typename enable_if
2188
<
2189
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2190
    _Tp
2191
>::type
2192
atomic_fetch_add_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
2193
{
2194
    return __o->fetch_add(__op, __m);
2195
}
2196
2197
template <class _Tp>
2198
_LIBCPP_INLINE_VISIBILITY
2199
_Tp*
2200
atomic_fetch_add_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
2201
                          memory_order __m) _NOEXCEPT
2202
{
2203
    return __o->fetch_add(__op, __m);
2204
}
2205
2206
template <class _Tp>
2207
_LIBCPP_INLINE_VISIBILITY
2208
_Tp*
2209
atomic_fetch_add_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
2210
{
2211
    return __o->fetch_add(__op, __m);
2212
}
2213
2214
// atomic_fetch_sub
2215
2216
// Mirrors the atomic_fetch_add family above: integral value overloads are
// SFINAE-constrained to non-bool integral types; pointer overloads take a
// ptrdiff_t element count.
template <class _Tp>
2217
_LIBCPP_INLINE_VISIBILITY
2218
typename enable_if
2219
<
2220
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2221
    _Tp
2222
>::type
2223
atomic_fetch_sub(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
2224
{
2225
    return __o->fetch_sub(__op);
2226
}
2227
2228
template <class _Tp>
2229
_LIBCPP_INLINE_VISIBILITY
2230
typename enable_if
2231
<
2232
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2233
    _Tp
2234
>::type
2235
atomic_fetch_sub(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
2236
{
2237
    return __o->fetch_sub(__op);
2238
}
2239
2240
template <class _Tp>
2241
_LIBCPP_INLINE_VISIBILITY
2242
_Tp*
2243
atomic_fetch_sub(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
2244
{
2245
    return __o->fetch_sub(__op);
2246
}
2247
2248
template <class _Tp>
2249
_LIBCPP_INLINE_VISIBILITY
2250
_Tp*
2251
atomic_fetch_sub(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
2252
{
2253
    return __o->fetch_sub(__op);
2254
}
2255
2256
// atomic_fetch_sub_explicit
2257
2258
template <class _Tp>
2259
_LIBCPP_INLINE_VISIBILITY
2260
typename enable_if
2261
<
2262
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2263
    _Tp
2264
>::type
2265
atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
2266
{
2267
    return __o->fetch_sub(__op, __m);
2268
}
2269
2270
template <class _Tp>
2271
_LIBCPP_INLINE_VISIBILITY
2272
typename enable_if
2273
<
2274
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2275
    _Tp
2276
>::type
2277
atomic_fetch_sub_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
2278
{
2279
    return __o->fetch_sub(__op, __m);
2280
}
2281
2282
template <class _Tp>
2283
_LIBCPP_INLINE_VISIBILITY
2284
_Tp*
2285
atomic_fetch_sub_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
2286
                          memory_order __m) _NOEXCEPT
2287
{
2288
    return __o->fetch_sub(__op, __m);
2289
}
2290
2291
template <class _Tp>
2292
_LIBCPP_INLINE_VISIBILITY
2293
_Tp*
2294
atomic_fetch_sub_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
2295
{
2296
    return __o->fetch_sub(__op, __m);
2297
}
2298
2299
// atomic_fetch_and
2300
2301
template <class _Tp>
2302
_LIBCPP_INLINE_VISIBILITY
2303
typename enable_if
2304
<
2305
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2306
    _Tp
2307
>::type
2308
atomic_fetch_and(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
2309
{
2310
    return __o->fetch_and(__op);
2311
}
2312
2313
template <class _Tp>
2314
_LIBCPP_INLINE_VISIBILITY
2315
typename enable_if
2316
<
2317
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2318
    _Tp
2319
>::type
2320
atomic_fetch_and(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
2321
{
2322
    return __o->fetch_and(__op);
2323
}
2324
2325
// atomic_fetch_and_explicit
2326
2327
template <class _Tp>
2328
_LIBCPP_INLINE_VISIBILITY
2329
typename enable_if
2330
<
2331
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2332
    _Tp
2333
>::type
2334
atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
2335
{
2336
    return __o->fetch_and(__op, __m);
2337
}
2338
2339
template <class _Tp>
2340
_LIBCPP_INLINE_VISIBILITY
2341
typename enable_if
2342
<
2343
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2344
    _Tp
2345
>::type
2346
atomic_fetch_and_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
2347
{
2348
    return __o->fetch_and(__op, __m);
2349
}
2350
2351
// atomic_fetch_or
2352
2353
template <class _Tp>
2354
_LIBCPP_INLINE_VISIBILITY
2355
typename enable_if
2356
<
2357
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2358
    _Tp
2359
>::type
2360
atomic_fetch_or(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
2361
{
2362
    return __o->fetch_or(__op);
2363
}
2364
2365
template <class _Tp>
2366
_LIBCPP_INLINE_VISIBILITY
2367
typename enable_if
2368
<
2369
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2370
    _Tp
2371
>::type
2372
atomic_fetch_or(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
2373
{
2374
    return __o->fetch_or(__op);
2375
}
2376
2377
// atomic_fetch_or_explicit
2378
2379
template <class _Tp>
2380
_LIBCPP_INLINE_VISIBILITY
2381
typename enable_if
2382
<
2383
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2384
    _Tp
2385
>::type
2386
atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
2387
{
2388
    return __o->fetch_or(__op, __m);
2389
}
2390
2391
template <class _Tp>
2392
_LIBCPP_INLINE_VISIBILITY
2393
typename enable_if
2394
<
2395
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2396
    _Tp
2397
>::type
2398
atomic_fetch_or_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
2399
{
2400
    return __o->fetch_or(__op, __m);
2401
}
2402
2403
// atomic_fetch_xor
2404
2405
template <class _Tp>
2406
_LIBCPP_INLINE_VISIBILITY
2407
typename enable_if
2408
<
2409
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2410
    _Tp
2411
>::type
2412
atomic_fetch_xor(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
2413
{
2414
    return __o->fetch_xor(__op);
2415
}
2416
2417
template <class _Tp>
2418
_LIBCPP_INLINE_VISIBILITY
2419
typename enable_if
2420
<
2421
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2422
    _Tp
2423
>::type
2424
atomic_fetch_xor(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
2425
{
2426
    return __o->fetch_xor(__op);
2427
}
2428
2429
// atomic_fetch_xor_explicit
2430
2431
template <class _Tp>
2432
_LIBCPP_INLINE_VISIBILITY
2433
typename enable_if
2434
<
2435
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2436
    _Tp
2437
>::type
2438
atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
2439
{
2440
    return __o->fetch_xor(__op, __m);
2441
}
2442
2443
template <class _Tp>
2444
_LIBCPP_INLINE_VISIBILITY
2445
typename enable_if
2446
<
2447
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2448
    _Tp
2449
>::type
2450
atomic_fetch_xor_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
2451
{
2452
    return __o->fetch_xor(__op, __m);
2453
}
2454
2455
// flag type and operations

// atomic_flag ([atomics.flag]): the minimal atomic boolean flag, built
// directly on the __cxx_atomic_* primitives over _LIBCPP_ATOMIC_FLAG_TYPE.
// The typedef-struct spelling keeps the C-compatible name.
typedef struct atomic_flag
{
    __cxx_atomic_impl<_LIBCPP_ATOMIC_FLAG_TYPE> __a_;

    // Returns the current value of the flag (C++20 atomic_flag::test);
    // does not modify the flag.
    _LIBCPP_INLINE_VISIBILITY
    bool test(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
        {return _LIBCPP_ATOMIC_FLAG_TYPE(true) == __cxx_atomic_load(&__a_, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool test(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
        {return _LIBCPP_ATOMIC_FLAG_TYPE(true) == __cxx_atomic_load(&__a_, __m);}

    // Atomically sets the flag to true and returns its previous value
    // (implemented as an exchange with true).
    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_exchange(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_exchange(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), __m);}

    // Atomically stores false into the flag.
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {__cxx_atomic_store(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), __m);}
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {__cxx_atomic_store(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), __m);}

    // C++20 atomic waiting: blocks while the flag's value equals __v
    // (delegated to the platform wait/notify layer).
    _LIBCPP_INLINE_VISIBILITY
    void wait(bool __v, memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
        {__cxx_atomic_wait(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(__v), __m);}
    _LIBCPP_INLINE_VISIBILITY
    void wait(bool __v, memory_order __m = memory_order_seq_cst) const _NOEXCEPT
        {__cxx_atomic_wait(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(__v), __m);}
    // Wakes at least one / all threads blocked in wait() on this flag.
    _LIBCPP_INLINE_VISIBILITY
    void notify_one() volatile _NOEXCEPT
        {__cxx_atomic_notify_one(&__a_);}
    _LIBCPP_INLINE_VISIBILITY
    void notify_one() _NOEXCEPT
        {__cxx_atomic_notify_one(&__a_);}
    _LIBCPP_INLINE_VISIBILITY
    void notify_all() volatile _NOEXCEPT
        {__cxx_atomic_notify_all(&__a_);}
    _LIBCPP_INLINE_VISIBILITY
    void notify_all() _NOEXCEPT
        {__cxx_atomic_notify_all(&__a_);}

    // Default constructor leaves the flag uninitialized in C++03/11 style
    // (use ATOMIC_FLAG_INIT); _LIBCPP_DEFAULT expands to "= default;" where
    // available.
    _LIBCPP_INLINE_VISIBILITY
    atomic_flag() _NOEXCEPT _LIBCPP_DEFAULT

    // Value-initializing constructor — a libc++ extension, not in the
    // standard.
    _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR
    atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {} // EXTENSION

// atomic_flag is neither copyable nor copy-assignable; in C++03 (no
// "= delete") the same effect is achieved by private, undefined members.
#ifndef _LIBCPP_CXX03_LANG
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;
#else
private:
    _LIBCPP_INLINE_VISIBILITY
    atomic_flag(const atomic_flag&);
    _LIBCPP_INLINE_VISIBILITY
    atomic_flag& operator=(const atomic_flag&);
    _LIBCPP_INLINE_VISIBILITY
    atomic_flag& operator=(const atomic_flag&) volatile;
#endif
} atomic_flag;
2520
2521
2522
// Free-function forms of atomic_flag::test ([atomics.flag]); each is
// provided for both volatile and non-volatile flags, the _explicit
// variants forwarding the caller's memory ordering.

inline _LIBCPP_INLINE_VISIBILITY
bool atomic_flag_test(const volatile atomic_flag* __o) _NOEXCEPT
{ return __o->test(); }

inline _LIBCPP_INLINE_VISIBILITY
bool atomic_flag_test(const atomic_flag* __o) _NOEXCEPT
{ return __o->test(); }

inline _LIBCPP_INLINE_VISIBILITY
bool atomic_flag_test_explicit(const volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
{ return __o->test(__m); }

inline _LIBCPP_INLINE_VISIBILITY
bool atomic_flag_test_explicit(const atomic_flag* __o, memory_order __m) _NOEXCEPT
{ return __o->test(__m); }
2549
2550
// Free-function forms of atomic_flag::test_and_set: atomically set the
// flag and return its previous value.

inline _LIBCPP_INLINE_VISIBILITY
bool atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
{ return __o->test_and_set(); }

inline _LIBCPP_INLINE_VISIBILITY
bool atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
{ return __o->test_and_set(); }

inline _LIBCPP_INLINE_VISIBILITY
bool atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
{ return __o->test_and_set(__m); }

inline _LIBCPP_INLINE_VISIBILITY
bool atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
{ return __o->test_and_set(__m); }
2577
2578
// Free-function forms of atomic_flag::clear: atomically reset the flag to
// false.

inline _LIBCPP_INLINE_VISIBILITY
void atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
{ __o->clear(); }

inline _LIBCPP_INLINE_VISIBILITY
void atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
{ __o->clear(); }

inline _LIBCPP_INLINE_VISIBILITY
void atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
{ __o->clear(__m); }

inline _LIBCPP_INLINE_VISIBILITY
void atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
{ __o->clear(__m); }
2605
2606
// Free-function forms of atomic_flag::wait (C++20 atomic waiting): block
// while the flag's value equals __v.

inline _LIBCPP_INLINE_VISIBILITY
void atomic_flag_wait(const volatile atomic_flag* __o, bool __v) _NOEXCEPT
{ __o->wait(__v); }

inline _LIBCPP_INLINE_VISIBILITY
void atomic_flag_wait(const atomic_flag* __o, bool __v) _NOEXCEPT
{ __o->wait(__v); }

inline _LIBCPP_INLINE_VISIBILITY
void atomic_flag_wait_explicit(const volatile atomic_flag* __o, bool __v, memory_order __m) _NOEXCEPT
{ __o->wait(__v, __m); }

inline _LIBCPP_INLINE_VISIBILITY
void atomic_flag_wait_explicit(const atomic_flag* __o, bool __v, memory_order __m) _NOEXCEPT
{ __o->wait(__v, __m); }
2635
2636
// Free-function forms of atomic_flag::notify_one / notify_all: wake one /
// all threads blocked in a wait on this flag.

inline _LIBCPP_INLINE_VISIBILITY
void atomic_flag_notify_one(volatile atomic_flag* __o) _NOEXCEPT
{ __o->notify_one(); }

inline _LIBCPP_INLINE_VISIBILITY
void atomic_flag_notify_one(atomic_flag* __o) _NOEXCEPT
{ __o->notify_one(); }

inline _LIBCPP_INLINE_VISIBILITY
void atomic_flag_notify_all(volatile atomic_flag* __o) _NOEXCEPT
{ __o->notify_all(); }

inline _LIBCPP_INLINE_VISIBILITY
void atomic_flag_notify_all(atomic_flag* __o) _NOEXCEPT
{ __o->notify_all(); }
2663
2664
// fences
2665
2666
inline _LIBCPP_INLINE_VISIBILITY
2667
void
2668
atomic_thread_fence(memory_order __m) _NOEXCEPT
2669
0
{
2670
0
    __cxx_atomic_thread_fence(__m);
2671
0
}
2672
2673
inline _LIBCPP_INLINE_VISIBILITY
2674
void
2675
atomic_signal_fence(memory_order __m) _NOEXCEPT
2676
{
2677
    __cxx_atomic_signal_fence(__m);
2678
}
2679
2680
// Atomics for standard typedef types

// Convenience typedefs required by [atomics.syn]: each atomic_X below is
// simply atomic<X> for the corresponding builtin or <cstdint> typedef.
// typedef (rather than using) is kept for C++03 compatibility.

typedef atomic<bool>               atomic_bool;
typedef atomic<char>               atomic_char;
typedef atomic<signed char>        atomic_schar;
typedef atomic<unsigned char>      atomic_uchar;
typedef atomic<short>              atomic_short;
typedef atomic<unsigned short>     atomic_ushort;
typedef atomic<int>                atomic_int;
typedef atomic<unsigned int>       atomic_uint;
typedef atomic<long>               atomic_long;
typedef atomic<unsigned long>      atomic_ulong;
typedef atomic<long long>          atomic_llong;
typedef atomic<unsigned long long> atomic_ullong;
typedef atomic<char16_t>           atomic_char16_t;
typedef atomic<char32_t>           atomic_char32_t;
typedef atomic<wchar_t>            atomic_wchar_t;

// least-width integer atomics
typedef atomic<int_least8_t>   atomic_int_least8_t;
typedef atomic<uint_least8_t>  atomic_uint_least8_t;
typedef atomic<int_least16_t>  atomic_int_least16_t;
typedef atomic<uint_least16_t> atomic_uint_least16_t;
typedef atomic<int_least32_t>  atomic_int_least32_t;
typedef atomic<uint_least32_t> atomic_uint_least32_t;
typedef atomic<int_least64_t>  atomic_int_least64_t;
typedef atomic<uint_least64_t> atomic_uint_least64_t;

// fast-width integer atomics
typedef atomic<int_fast8_t>   atomic_int_fast8_t;
typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
typedef atomic<int_fast16_t>  atomic_int_fast16_t;
typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
typedef atomic<int_fast32_t>  atomic_int_fast32_t;
typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
typedef atomic<int_fast64_t>  atomic_int_fast64_t;
typedef atomic<uint_fast64_t> atomic_uint_fast64_t;

// exact-width integer atomics
typedef atomic< int8_t>  atomic_int8_t;
typedef atomic<uint8_t>  atomic_uint8_t;
typedef atomic< int16_t> atomic_int16_t;
typedef atomic<uint16_t> atomic_uint16_t;
typedef atomic< int32_t> atomic_int32_t;
typedef atomic<uint32_t> atomic_uint32_t;
typedef atomic< int64_t> atomic_int64_t;
typedef atomic<uint64_t> atomic_uint64_t;

// pointer-sized / size / max-width atomics
typedef atomic<intptr_t>  atomic_intptr_t;
typedef atomic<uintptr_t> atomic_uintptr_t;
typedef atomic<size_t>    atomic_size_t;
typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
typedef atomic<intmax_t>  atomic_intmax_t;
typedef atomic<uintmax_t> atomic_uintmax_t;
2731
2732
// atomic_*_lock_free : prefer the contention type most highly, then the largest lock-free type

// _LIBCPP_CONTENTION_LOCK_FREE: true iff the platform's contention type
// (__cxx_contention_t) is always lock-free, in which case it is the
// preferred representation for the lock-free aliases below.
#ifdef __cpp_lib_atomic_is_always_lock_free
# define _LIBCPP_CONTENTION_LOCK_FREE __atomic_always_lock_free(sizeof(__cxx_contention_t), 0)
#else
# define _LIBCPP_CONTENTION_LOCK_FREE false
#endif

// Pick the widest integral type guaranteed lock-free (== 2), falling back
// from long long to char.  If no integral type is lock-free, mark that
// with _LIBCPP_NO_LOCK_FREE_TYPES and do not define the aliases at all —
// previously the typedefs below referenced the undefined
// __libcpp_*_lock_free names unconditionally, producing a hard compile
// error on such platforms even when the aliases were never used.
#if ATOMIC_LLONG_LOCK_FREE == 2
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, long long>::type          __libcpp_signed_lock_free;
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned long long>::type __libcpp_unsigned_lock_free;
#elif ATOMIC_INT_LOCK_FREE == 2
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, int>::type                __libcpp_signed_lock_free;
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned int>::type       __libcpp_unsigned_lock_free;
#elif ATOMIC_SHORT_LOCK_FREE == 2
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, short>::type              __libcpp_signed_lock_free;
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned short>::type     __libcpp_unsigned_lock_free;
#elif ATOMIC_CHAR_LOCK_FREE == 2
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, char>::type               __libcpp_signed_lock_free;
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned char>::type      __libcpp_unsigned_lock_free;
#else
    // No signed/unsigned lock-free types
#   define _LIBCPP_NO_LOCK_FREE_TYPES
#endif

#if !defined(_LIBCPP_NO_LOCK_FREE_TYPES)
typedef atomic<__libcpp_signed_lock_free> atomic_signed_lock_free;
typedef atomic<__libcpp_unsigned_lock_free> atomic_unsigned_lock_free;
#endif

// ATOMIC_FLAG_INIT / ATOMIC_VAR_INIT: C++11-style aggregate initializers
// for atomic_flag and atomic<T>.
#define ATOMIC_FLAG_INIT {false}
#define ATOMIC_VAR_INIT(__v) {__v}
2761
2762
_LIBCPP_END_NAMESPACE_STD
2763
2764
#endif  // _LIBCPP_ATOMIC