libstdc++
atomic
Go to the documentation of this file.
1// -*- C++ -*- header.
2
3// Copyright (C) 2008-2020 Free Software Foundation, Inc.
4//
5// This file is part of the GNU ISO C++ Library. This library is free
6// software; you can redistribute it and/or modify it under the
7// terms of the GNU General Public License as published by the
8// Free Software Foundation; either version 3, or (at your option)
9// any later version.
10
11// This library is distributed in the hope that it will be useful,
12// but WITHOUT ANY WARRANTY; without even the implied warranty of
13// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14// GNU General Public License for more details.
15
16// Under Section 7 of GPL version 3, you are granted additional
17// permissions described in the GCC Runtime Library Exception, version
18// 3.1, as published by the Free Software Foundation.
19
20// You should have received a copy of the GNU General Public License and
21// a copy of the GCC Runtime Library Exception along with this program;
22// see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
23// <http://www.gnu.org/licenses/>.
24
25/** @file include/atomic
26 * This is a Standard C++ Library header.
27 */
28
29// Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
30// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html
31
32#ifndef _GLIBCXX_ATOMIC
33#define _GLIBCXX_ATOMIC 1
34
35#pragma GCC system_header
36
37#if __cplusplus < 201103L
38# include <bits/c++0x_warning.h>
39#else
40
41#include <bits/atomic_base.h>
42
43namespace std _GLIBCXX_VISIBILITY(default)
44{
45_GLIBCXX_BEGIN_NAMESPACE_VERSION
46
47 /**
48 * @addtogroup atomics
49 * @{
50 */
51
52#if __cplusplus >= 201703L
53# define __cpp_lib_atomic_is_always_lock_free 201603
54#endif
55
  // Forward declaration of the primary template; defined later in this
  // header, after the atomic<bool> explicit specialization.
  template<typename _Tp>
    struct atomic;
58
59 /// atomic<bool>
60 // NB: No operators or fetch-operations for this type.
61 template<>
62 struct atomic<bool>
63 {
64 using value_type = bool;
65
66 private:
67 __atomic_base<bool> _M_base;
68
69 public:
70 atomic() noexcept = default;
71 ~atomic() noexcept = default;
72 atomic(const atomic&) = delete;
73 atomic& operator=(const atomic&) = delete;
74 atomic& operator=(const atomic&) volatile = delete;
75
76 constexpr atomic(bool __i) noexcept : _M_base(__i) { }
77
78 bool
79 operator=(bool __i) noexcept
80 { return _M_base.operator=(__i); }
81
82 bool
83 operator=(bool __i) volatile noexcept
84 { return _M_base.operator=(__i); }
85
86 operator bool() const noexcept
87 { return _M_base.load(); }
88
89 operator bool() const volatile noexcept
90 { return _M_base.load(); }
91
92 bool
93 is_lock_free() const noexcept { return _M_base.is_lock_free(); }
94
95 bool
96 is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }
97
98#if __cplusplus >= 201703L
99 static constexpr bool is_always_lock_free = ATOMIC_BOOL_LOCK_FREE == 2;
100#endif
101
102 void
103 store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
104 { _M_base.store(__i, __m); }
105
106 void
107 store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
108 { _M_base.store(__i, __m); }
109
110 bool
111 load(memory_order __m = memory_order_seq_cst) const noexcept
112 { return _M_base.load(__m); }
113
114 bool
115 load(memory_order __m = memory_order_seq_cst) const volatile noexcept
116 { return _M_base.load(__m); }
117
118 bool
119 exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
120 { return _M_base.exchange(__i, __m); }
121
122 bool
123 exchange(bool __i,
124 memory_order __m = memory_order_seq_cst) volatile noexcept
125 { return _M_base.exchange(__i, __m); }
126
127 bool
128 compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
129 memory_order __m2) noexcept
130 { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
131
132 bool
133 compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
134 memory_order __m2) volatile noexcept
135 { return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
136
137 bool
138 compare_exchange_weak(bool& __i1, bool __i2,
139 memory_order __m = memory_order_seq_cst) noexcept
140 { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
141
142 bool
143 compare_exchange_weak(bool& __i1, bool __i2,
144 memory_order __m = memory_order_seq_cst) volatile noexcept
145 { return _M_base.compare_exchange_weak(__i1, __i2, __m); }
146
147 bool
148 compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
149 memory_order __m2) noexcept
150 { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
151
152 bool
153 compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
154 memory_order __m2) volatile noexcept
155 { return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
156
157 bool
158 compare_exchange_strong(bool& __i1, bool __i2,
159 memory_order __m = memory_order_seq_cst) noexcept
160 { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
161
162 bool
163 compare_exchange_strong(bool& __i1, bool __i2,
164 memory_order __m = memory_order_seq_cst) volatile noexcept
165 { return _M_base.compare_exchange_strong(__i1, __i2, __m); }
166 };
167
168#if __cplusplus <= 201703L
169# define _GLIBCXX20_INIT(I)
170#else
171# define _GLIBCXX20_INIT(I) = I
172#endif
173
  /**
   * @brief Generic atomic type, primary class template.
   *
   * @tparam _Tp  Type to be made atomic, must be trivially copyable.
   */
  template<typename _Tp>
    struct atomic
    {
      using value_type = _Tp;

    private:
      // Align 1/2/4/8/16-byte types to at least their size.
      // A power-of-two size no greater than 16 yields sizeof(_Tp);
      // anything else yields 0, i.e. no requirement beyond alignof(_Tp).
      static constexpr int _S_min_alignment
	= (sizeof(_Tp) & (sizeof(_Tp) - 1)) || sizeof(_Tp) > 16
	? 0 : sizeof(_Tp);

      static constexpr int _S_alignment
	= _S_min_alignment > alignof(_Tp) ? _S_min_alignment : alignof(_Tp);

      // The contained value.  _GLIBCXX20_INIT expands to "= _Tp()" for
      // C++20 and later, so default construction value-initializes then.
      alignas(_S_alignment) _Tp _M_i _GLIBCXX20_INIT(_Tp());

      static_assert(__is_trivially_copyable(_Tp),
		    "std::atomic requires a trivially copyable type");

      static_assert(sizeof(_Tp) > 0,
		    "Incomplete or zero-sized types are not supported");

#if __cplusplus > 201703L
      // Enforced only for C++20 and later.
      static_assert(is_copy_constructible_v<_Tp>);
      static_assert(is_move_constructible_v<_Tp>);
      static_assert(is_copy_assignable_v<_Tp>);
      static_assert(is_move_assignable_v<_Tp>);
#endif

    public:
      atomic() = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      // Converting constructor; initialization is not an atomic operation.
      constexpr atomic(_Tp __i) noexcept : _M_i(__i) { }

      // Conversion to the value type performs a seq_cst load.
      operator _Tp() const noexcept
      { return load(); }

      operator _Tp() const volatile noexcept
      { return load(); }

      // Assignment performs a seq_cst store, then returns the argument.
      _Tp
      operator=(_Tp __i) noexcept
      { store(__i); return __i; }

      _Tp
      operator=(_Tp __i) volatile noexcept
      { store(__i); return __i; }

      bool
      is_lock_free() const noexcept
      {
	// Produce a fake, minimally aligned pointer.  Only the alignment
	// of the address matters to the builtin, not the object itself.
	return __atomic_is_lock_free(sizeof(_M_i),
				     reinterpret_cast<void *>(-_S_alignment));
      }

      bool
      is_lock_free() const volatile noexcept
      {
	// Produce a fake, minimally aligned pointer.
	return __atomic_is_lock_free(sizeof(_M_i),
				     reinterpret_cast<void *>(-_S_alignment));
      }

#if __cplusplus >= 201703L
      // Second argument 0 asks the builtin to assume natural alignment.
      static constexpr bool is_always_lock_free
	= __atomic_always_lock_free(sizeof(_M_i), 0);
#endif

      void
      store(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
      { __atomic_store(std::__addressof(_M_i), std::__addressof(__i), int(__m)); }

      void
      store(_Tp __i, memory_order __m = memory_order_seq_cst) volatile noexcept
      { __atomic_store(std::__addressof(_M_i), std::__addressof(__i), int(__m)); }

      _Tp
      load(memory_order __m = memory_order_seq_cst) const noexcept
      {
	// Load raw bytes into an aligned buffer instead of a _Tp object
	// (presumably because _Tp need not be default constructible —
	// trivial copyability is the only requirement asserted above).
	alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
	__atomic_load(std::__addressof(_M_i), __ptr, int(__m));
	return *__ptr;
      }

      _Tp
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      {
	alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
	__atomic_load(std::__addressof(_M_i), __ptr, int(__m));
	return *__ptr;
      }

      _Tp
      exchange(_Tp __i, memory_order __m = memory_order_seq_cst) noexcept
      {
	// Same aligned-buffer technique as load() for the old value.
	alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
	__atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
			  __ptr, int(__m));
	return *__ptr;
      }

      _Tp
      exchange(_Tp __i,
	       memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	alignas(_Tp) unsigned char __buf[sizeof(_Tp)];
	_Tp* __ptr = reinterpret_cast<_Tp*>(__buf);
	__atomic_exchange(std::__addressof(_M_i), std::__addressof(__i),
			  __ptr, int(__m));
	return *__ptr;
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
			    memory_order __f) noexcept
      {
	// Fourth argument true selects a weak CAS (may fail spuriously).
	return __atomic_compare_exchange(std::__addressof(_M_i),
					 std::__addressof(__e),
					 std::__addressof(__i),
					 true, int(__s), int(__f));
      }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
			    memory_order __f) volatile noexcept
      {
	return __atomic_compare_exchange(std::__addressof(_M_i),
					 std::__addressof(__e),
					 std::__addressof(__i),
					 true, int(__s), int(__f));
      }

      // Single-order forms derive the failure order from the success order.
      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
			    memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_weak(__e, __i, __m,
				     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_weak(_Tp& __e, _Tp __i,
			    memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_weak(__e, __i, __m,
				     __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
			      memory_order __f) noexcept
      {
	// Fourth argument false selects a strong CAS (no spurious failure).
	return __atomic_compare_exchange(std::__addressof(_M_i),
					 std::__addressof(__e),
					 std::__addressof(__i),
					 false, int(__s), int(__f));
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
			      memory_order __f) volatile noexcept
      {
	return __atomic_compare_exchange(std::__addressof(_M_i),
					 std::__addressof(__e),
					 std::__addressof(__i),
					 false, int(__s), int(__f));
      }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
			      memory_order __m = memory_order_seq_cst) noexcept
      { return compare_exchange_strong(__e, __i, __m,
				       __cmpexch_failure_order(__m)); }

      bool
      compare_exchange_strong(_Tp& __e, _Tp __i,
			      memory_order __m = memory_order_seq_cst) volatile noexcept
      { return compare_exchange_strong(__e, __i, __m,
				       __cmpexch_failure_order(__m)); }
    };
363#undef _GLIBCXX20_INIT
364
  /// Partial specialization for pointer types.
  // Arithmetic operations (++/--/+=/-=/fetch_add/fetch_sub) statically
  // require _Tp to be an object type in C++17 and later.
  template<typename _Tp>
    struct atomic<_Tp*>
    {
      using value_type = _Tp*;
      using difference_type = ptrdiff_t;

      typedef _Tp* __pointer_type;
      typedef __atomic_base<_Tp*> __base_type;
      // Public base subobject; all operations forward to it.
      __base_type _M_b;

      atomic() noexcept = default;
      ~atomic() noexcept = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;
      atomic& operator=(const atomic&) volatile = delete;

      // Converting constructor; initialization is not an atomic operation.
      constexpr atomic(__pointer_type __p) noexcept : _M_b(__p) { }

      // Conversion to the pointer type performs a load via the base.
      operator __pointer_type() const noexcept
      { return __pointer_type(_M_b); }

      operator __pointer_type() const volatile noexcept
      { return __pointer_type(_M_b); }

      __pointer_type
      operator=(__pointer_type __p) noexcept
      { return _M_b.operator=(__p); }

      __pointer_type
      operator=(__pointer_type __p) volatile noexcept
      { return _M_b.operator=(__p); }

      // Post-increment: returns the previous pointer value.
      __pointer_type
      operator++(int) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b++;
      }

      __pointer_type
      operator++(int) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b++;
      }

      __pointer_type
      operator--(int) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b--;
      }

      __pointer_type
      operator--(int) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b--;
      }

      // Pre-increment: returns the new pointer value.
      __pointer_type
      operator++() noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return ++_M_b;
      }

      __pointer_type
      operator++() volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return ++_M_b;
      }

      __pointer_type
      operator--() noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return --_M_b;
      }

      __pointer_type
      operator--() volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return --_M_b;
      }

      __pointer_type
      operator+=(ptrdiff_t __d) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.operator+=(__d);
      }

      __pointer_type
      operator+=(ptrdiff_t __d) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.operator+=(__d);
      }

      __pointer_type
      operator-=(ptrdiff_t __d) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.operator-=(__d);
      }

      __pointer_type
      operator-=(ptrdiff_t __d) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.operator-=(__d);
      }

      bool
      is_lock_free() const noexcept
      { return _M_b.is_lock_free(); }

      bool
      is_lock_free() const volatile noexcept
      { return _M_b.is_lock_free(); }

#if __cplusplus >= 201703L
      static constexpr bool is_always_lock_free = ATOMIC_POINTER_LOCK_FREE == 2;
#endif

      void
      store(__pointer_type __p,
	    memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.store(__p, __m); }

      void
      store(__pointer_type __p,
	    memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.store(__p, __m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const noexcept
      { return _M_b.load(__m); }

      __pointer_type
      load(memory_order __m = memory_order_seq_cst) const volatile noexcept
      { return _M_b.load(__m); }

      __pointer_type
      exchange(__pointer_type __p,
	       memory_order __m = memory_order_seq_cst) noexcept
      { return _M_b.exchange(__p, __m); }

      __pointer_type
      exchange(__pointer_type __p,
	       memory_order __m = memory_order_seq_cst) volatile noexcept
      { return _M_b.exchange(__p, __m); }

      // NB: the weak forms forward to the base's *strong* CAS.  This is
      // conforming — a strong CAS never fails spuriously, which satisfies
      // the weaker contract — presumably done because __atomic_base<_Tp*>
      // provides no weak member (confirm against <bits/atomic_base.h>).
      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
			    memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
			    memory_order __m1,
			    memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      // Single-order forms derive the failure order from the success order.
      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
			    memory_order __m = memory_order_seq_cst) noexcept
      {
	return compare_exchange_weak(__p1, __p2, __m,
				     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
			    memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	return compare_exchange_weak(__p1, __p2, __m,
				     __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m1, memory_order __m2) noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m1,
			      memory_order __m2) volatile noexcept
      { return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m = memory_order_seq_cst) noexcept
      {
	return _M_b.compare_exchange_strong(__p1, __p2, __m,
					    __cmpexch_failure_order(__m));
      }

      bool
      compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
			      memory_order __m = memory_order_seq_cst) volatile noexcept
      {
	return _M_b.compare_exchange_strong(__p1, __p2, __m,
					    __cmpexch_failure_order(__m));
      }

      // Returns the pointer value held before the addition.
      __pointer_type
      fetch_add(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.fetch_add(__d, __m);
      }

      __pointer_type
      fetch_add(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.fetch_add(__d, __m);
      }

      // Returns the pointer value held before the subtraction.
      __pointer_type
      fetch_sub(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.fetch_sub(__d, __m);
      }

      __pointer_type
      fetch_sub(ptrdiff_t __d,
		memory_order __m = memory_order_seq_cst) volatile noexcept
      {
#if __cplusplus >= 201703L
	static_assert( is_object<_Tp>::value, "pointer to object type" );
#endif
	return _M_b.fetch_sub(__d, __m);
      }
    };
640
641
642 /// Explicit specialization for char.
643 template<>
644 struct atomic<char> : __atomic_base<char>
645 {
646 typedef char __integral_type;
647 typedef __atomic_base<char> __base_type;
648
649 atomic() noexcept = default;
650 ~atomic() noexcept = default;
651 atomic(const atomic&) = delete;
652 atomic& operator=(const atomic&) = delete;
653 atomic& operator=(const atomic&) volatile = delete;
654
655 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
656
657 using __base_type::operator __integral_type;
658 using __base_type::operator=;
659
660#if __cplusplus >= 201703L
661 static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
662#endif
663 };
664
665 /// Explicit specialization for signed char.
666 template<>
667 struct atomic<signed char> : __atomic_base<signed char>
668 {
669 typedef signed char __integral_type;
670 typedef __atomic_base<signed char> __base_type;
671
672 atomic() noexcept= default;
673 ~atomic() noexcept = default;
674 atomic(const atomic&) = delete;
675 atomic& operator=(const atomic&) = delete;
676 atomic& operator=(const atomic&) volatile = delete;
677
678 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
679
680 using __base_type::operator __integral_type;
681 using __base_type::operator=;
682
683#if __cplusplus >= 201703L
684 static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
685#endif
686 };
687
688 /// Explicit specialization for unsigned char.
689 template<>
690 struct atomic<unsigned char> : __atomic_base<unsigned char>
691 {
692 typedef unsigned char __integral_type;
693 typedef __atomic_base<unsigned char> __base_type;
694
695 atomic() noexcept= default;
696 ~atomic() noexcept = default;
697 atomic(const atomic&) = delete;
698 atomic& operator=(const atomic&) = delete;
699 atomic& operator=(const atomic&) volatile = delete;
700
701 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
702
703 using __base_type::operator __integral_type;
704 using __base_type::operator=;
705
706#if __cplusplus >= 201703L
707 static constexpr bool is_always_lock_free = ATOMIC_CHAR_LOCK_FREE == 2;
708#endif
709 };
710
711 /// Explicit specialization for short.
712 template<>
713 struct atomic<short> : __atomic_base<short>
714 {
715 typedef short __integral_type;
716 typedef __atomic_base<short> __base_type;
717
718 atomic() noexcept = default;
719 ~atomic() noexcept = default;
720 atomic(const atomic&) = delete;
721 atomic& operator=(const atomic&) = delete;
722 atomic& operator=(const atomic&) volatile = delete;
723
724 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
725
726 using __base_type::operator __integral_type;
727 using __base_type::operator=;
728
729#if __cplusplus >= 201703L
730 static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
731#endif
732 };
733
734 /// Explicit specialization for unsigned short.
735 template<>
736 struct atomic<unsigned short> : __atomic_base<unsigned short>
737 {
738 typedef unsigned short __integral_type;
739 typedef __atomic_base<unsigned short> __base_type;
740
741 atomic() noexcept = default;
742 ~atomic() noexcept = default;
743 atomic(const atomic&) = delete;
744 atomic& operator=(const atomic&) = delete;
745 atomic& operator=(const atomic&) volatile = delete;
746
747 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
748
749 using __base_type::operator __integral_type;
750 using __base_type::operator=;
751
752#if __cplusplus >= 201703L
753 static constexpr bool is_always_lock_free = ATOMIC_SHORT_LOCK_FREE == 2;
754#endif
755 };
756
757 /// Explicit specialization for int.
758 template<>
759 struct atomic<int> : __atomic_base<int>
760 {
761 typedef int __integral_type;
762 typedef __atomic_base<int> __base_type;
763
764 atomic() noexcept = default;
765 ~atomic() noexcept = default;
766 atomic(const atomic&) = delete;
767 atomic& operator=(const atomic&) = delete;
768 atomic& operator=(const atomic&) volatile = delete;
769
770 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
771
772 using __base_type::operator __integral_type;
773 using __base_type::operator=;
774
775#if __cplusplus >= 201703L
776 static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
777#endif
778 };
779
780 /// Explicit specialization for unsigned int.
781 template<>
782 struct atomic<unsigned int> : __atomic_base<unsigned int>
783 {
784 typedef unsigned int __integral_type;
785 typedef __atomic_base<unsigned int> __base_type;
786
787 atomic() noexcept = default;
788 ~atomic() noexcept = default;
789 atomic(const atomic&) = delete;
790 atomic& operator=(const atomic&) = delete;
791 atomic& operator=(const atomic&) volatile = delete;
792
793 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
794
795 using __base_type::operator __integral_type;
796 using __base_type::operator=;
797
798#if __cplusplus >= 201703L
799 static constexpr bool is_always_lock_free = ATOMIC_INT_LOCK_FREE == 2;
800#endif
801 };
802
803 /// Explicit specialization for long.
804 template<>
805 struct atomic<long> : __atomic_base<long>
806 {
807 typedef long __integral_type;
808 typedef __atomic_base<long> __base_type;
809
810 atomic() noexcept = default;
811 ~atomic() noexcept = default;
812 atomic(const atomic&) = delete;
813 atomic& operator=(const atomic&) = delete;
814 atomic& operator=(const atomic&) volatile = delete;
815
816 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
817
818 using __base_type::operator __integral_type;
819 using __base_type::operator=;
820
821#if __cplusplus >= 201703L
822 static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
823#endif
824 };
825
826 /// Explicit specialization for unsigned long.
827 template<>
828 struct atomic<unsigned long> : __atomic_base<unsigned long>
829 {
830 typedef unsigned long __integral_type;
831 typedef __atomic_base<unsigned long> __base_type;
832
833 atomic() noexcept = default;
834 ~atomic() noexcept = default;
835 atomic(const atomic&) = delete;
836 atomic& operator=(const atomic&) = delete;
837 atomic& operator=(const atomic&) volatile = delete;
838
839 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
840
841 using __base_type::operator __integral_type;
842 using __base_type::operator=;
843
844#if __cplusplus >= 201703L
845 static constexpr bool is_always_lock_free = ATOMIC_LONG_LOCK_FREE == 2;
846#endif
847 };
848
849 /// Explicit specialization for long long.
850 template<>
851 struct atomic<long long> : __atomic_base<long long>
852 {
853 typedef long long __integral_type;
854 typedef __atomic_base<long long> __base_type;
855
856 atomic() noexcept = default;
857 ~atomic() noexcept = default;
858 atomic(const atomic&) = delete;
859 atomic& operator=(const atomic&) = delete;
860 atomic& operator=(const atomic&) volatile = delete;
861
862 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
863
864 using __base_type::operator __integral_type;
865 using __base_type::operator=;
866
867#if __cplusplus >= 201703L
868 static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
869#endif
870 };
871
872 /// Explicit specialization for unsigned long long.
873 template<>
874 struct atomic<unsigned long long> : __atomic_base<unsigned long long>
875 {
876 typedef unsigned long long __integral_type;
877 typedef __atomic_base<unsigned long long> __base_type;
878
879 atomic() noexcept = default;
880 ~atomic() noexcept = default;
881 atomic(const atomic&) = delete;
882 atomic& operator=(const atomic&) = delete;
883 atomic& operator=(const atomic&) volatile = delete;
884
885 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
886
887 using __base_type::operator __integral_type;
888 using __base_type::operator=;
889
890#if __cplusplus >= 201703L
891 static constexpr bool is_always_lock_free = ATOMIC_LLONG_LOCK_FREE == 2;
892#endif
893 };
894
895 /// Explicit specialization for wchar_t.
896 template<>
897 struct atomic<wchar_t> : __atomic_base<wchar_t>
898 {
899 typedef wchar_t __integral_type;
900 typedef __atomic_base<wchar_t> __base_type;
901
902 atomic() noexcept = default;
903 ~atomic() noexcept = default;
904 atomic(const atomic&) = delete;
905 atomic& operator=(const atomic&) = delete;
906 atomic& operator=(const atomic&) volatile = delete;
907
908 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
909
910 using __base_type::operator __integral_type;
911 using __base_type::operator=;
912
913#if __cplusplus >= 201703L
914 static constexpr bool is_always_lock_free = ATOMIC_WCHAR_T_LOCK_FREE == 2;
915#endif
916 };
917
918#ifdef _GLIBCXX_USE_CHAR8_T
919 /// Explicit specialization for char8_t.
920 template<>
921 struct atomic<char8_t> : __atomic_base<char8_t>
922 {
923 typedef char8_t __integral_type;
924 typedef __atomic_base<char8_t> __base_type;
925
926 atomic() noexcept = default;
927 ~atomic() noexcept = default;
928 atomic(const atomic&) = delete;
929 atomic& operator=(const atomic&) = delete;
930 atomic& operator=(const atomic&) volatile = delete;
931
932 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
933
934 using __base_type::operator __integral_type;
935 using __base_type::operator=;
936
937#if __cplusplus > 201402L
938 static constexpr bool is_always_lock_free = ATOMIC_CHAR8_T_LOCK_FREE == 2;
939#endif
940 };
941#endif
942
943 /// Explicit specialization for char16_t.
944 template<>
945 struct atomic<char16_t> : __atomic_base<char16_t>
946 {
947 typedef char16_t __integral_type;
948 typedef __atomic_base<char16_t> __base_type;
949
950 atomic() noexcept = default;
951 ~atomic() noexcept = default;
952 atomic(const atomic&) = delete;
953 atomic& operator=(const atomic&) = delete;
954 atomic& operator=(const atomic&) volatile = delete;
955
956 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
957
958 using __base_type::operator __integral_type;
959 using __base_type::operator=;
960
961#if __cplusplus >= 201703L
962 static constexpr bool is_always_lock_free = ATOMIC_CHAR16_T_LOCK_FREE == 2;
963#endif
964 };
965
966 /// Explicit specialization for char32_t.
967 template<>
968 struct atomic<char32_t> : __atomic_base<char32_t>
969 {
970 typedef char32_t __integral_type;
971 typedef __atomic_base<char32_t> __base_type;
972
973 atomic() noexcept = default;
974 ~atomic() noexcept = default;
975 atomic(const atomic&) = delete;
976 atomic& operator=(const atomic&) = delete;
977 atomic& operator=(const atomic&) volatile = delete;
978
979 constexpr atomic(__integral_type __i) noexcept : __base_type(__i) { }
980
981 using __base_type::operator __integral_type;
982 using __base_type::operator=;
983
984#if __cplusplus >= 201703L
985 static constexpr bool is_always_lock_free = ATOMIC_CHAR32_T_LOCK_FREE == 2;
986#endif
987 };
988
989
990 /// atomic_bool
991 typedef atomic<bool> atomic_bool;
992
993 /// atomic_char
994 typedef atomic<char> atomic_char;
995
996 /// atomic_schar
997 typedef atomic<signed char> atomic_schar;
998
999 /// atomic_uchar
1000 typedef atomic<unsigned char> atomic_uchar;
1001
1002 /// atomic_short
1003 typedef atomic<short> atomic_short;
1004
1005 /// atomic_ushort
1006 typedef atomic<unsigned short> atomic_ushort;
1007
1008 /// atomic_int
1009 typedef atomic<int> atomic_int;
1010
1011 /// atomic_uint
1012 typedef atomic<unsigned int> atomic_uint;
1013
1014 /// atomic_long
1015 typedef atomic<long> atomic_long;
1016
1017 /// atomic_ulong
1018 typedef atomic<unsigned long> atomic_ulong;
1019
1020 /// atomic_llong
1021 typedef atomic<long long> atomic_llong;
1022
1023 /// atomic_ullong
1024 typedef atomic<unsigned long long> atomic_ullong;
1025
1026 /// atomic_wchar_t
1027 typedef atomic<wchar_t> atomic_wchar_t;
1028
1029#ifdef _GLIBCXX_USE_CHAR8_T
1030 /// atomic_char8_t
1031 typedef atomic<char8_t> atomic_char8_t;
1032#endif
1033
1034 /// atomic_char16_t
1035 typedef atomic<char16_t> atomic_char16_t;
1036
1037 /// atomic_char32_t
1038 typedef atomic<char32_t> atomic_char32_t;
1039
1040#ifdef _GLIBCXX_USE_C99_STDINT_TR1
1041 // _GLIBCXX_RESOLVE_LIB_DEFECTS
1042 // 2441. Exact-width atomic typedefs should be provided
1043
1044 /// atomic_int8_t
1045 typedef atomic<int8_t> atomic_int8_t;
1046
1047 /// atomic_uint8_t
1048 typedef atomic<uint8_t> atomic_uint8_t;
1049
1050 /// atomic_int16_t
1051 typedef atomic<int16_t> atomic_int16_t;
1052
1053 /// atomic_uint16_t
1054 typedef atomic<uint16_t> atomic_uint16_t;
1055
1056 /// atomic_int32_t
1057 typedef atomic<int32_t> atomic_int32_t;
1058
1059 /// atomic_uint32_t
1060 typedef atomic<uint32_t> atomic_uint32_t;
1061
1062 /// atomic_int64_t
1063 typedef atomic<int64_t> atomic_int64_t;
1064
1065 /// atomic_uint64_t
1066 typedef atomic<uint64_t> atomic_uint64_t;
1067
1068
1069 /// atomic_int_least8_t
1070 typedef atomic<int_least8_t> atomic_int_least8_t;
1071
1072 /// atomic_uint_least8_t
1073 typedef atomic<uint_least8_t> atomic_uint_least8_t;
1074
1075 /// atomic_int_least16_t
1076 typedef atomic<int_least16_t> atomic_int_least16_t;
1077
1078 /// atomic_uint_least16_t
1079 typedef atomic<uint_least16_t> atomic_uint_least16_t;
1080
1081 /// atomic_int_least32_t
1082 typedef atomic<int_least32_t> atomic_int_least32_t;
1083
1084 /// atomic_uint_least32_t
1085 typedef atomic<uint_least32_t> atomic_uint_least32_t;
1086
1087 /// atomic_int_least64_t
1088 typedef atomic<int_least64_t> atomic_int_least64_t;
1089
1090 /// atomic_uint_least64_t
1091 typedef atomic<uint_least64_t> atomic_uint_least64_t;
1092
1093
1094 /// atomic_int_fast8_t
1095 typedef atomic<int_fast8_t> atomic_int_fast8_t;
1096
1097 /// atomic_uint_fast8_t
1098 typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
1099
1100 /// atomic_int_fast16_t
1101 typedef atomic<int_fast16_t> atomic_int_fast16_t;
1102
1103 /// atomic_uint_fast16_t
1104 typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
1105
1106 /// atomic_int_fast32_t
1107 typedef atomic<int_fast32_t> atomic_int_fast32_t;
1108
1109 /// atomic_uint_fast32_t
1110 typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
1111
1112 /// atomic_int_fast64_t
1113 typedef atomic<int_fast64_t> atomic_int_fast64_t;
1114
1115 /// atomic_uint_fast64_t
1116 typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
1117#endif
1118
1119
1120 /// atomic_intptr_t
1121 typedef atomic<intptr_t> atomic_intptr_t;
1122
1123 /// atomic_uintptr_t
1124 typedef atomic<uintptr_t> atomic_uintptr_t;
1125
1126 /// atomic_size_t
1127 typedef atomic<size_t> atomic_size_t;
1128
1129 /// atomic_ptrdiff_t
1130 typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
1131
1132#ifdef _GLIBCXX_USE_C99_STDINT_TR1
1133 /// atomic_intmax_t
1134 typedef atomic<intmax_t> atomic_intmax_t;
1135
1136 /// atomic_uintmax_t
1137 typedef atomic<uintmax_t> atomic_uintmax_t;
1138#endif
1139
  // Function definitions, atomic_flag operations.

  /// Atomically set the flag and return its previous value, using the
  /// given memory order.
  inline bool
  atomic_flag_test_and_set_explicit(atomic_flag* __a,
				    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  /// @overload for volatile atomic_flag objects.
  inline bool
  atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
				    memory_order __m) noexcept
  { return __a->test_and_set(__m); }

  /// Atomically clear the flag, using the given memory order.
  inline void
  atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
  { __a->clear(__m); }

  /// @overload for volatile atomic_flag objects.
  inline void
  atomic_flag_clear_explicit(volatile atomic_flag* __a,
			     memory_order __m) noexcept
  { __a->clear(__m); }
1159
1160 inline bool
1161 atomic_flag_test_and_set(atomic_flag* __a) noexcept
1162 { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
1163
1164 inline bool
1165 atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
1166 { return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
1167
1168 inline void
1169 atomic_flag_clear(atomic_flag* __a) noexcept
1170 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
1171
1172 inline void
1173 atomic_flag_clear(volatile atomic_flag* __a) noexcept
1174 { atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
1175
  // _GLIBCXX_RESOLVE_LIB_DEFECTS
  // 3220. P0558 broke conforming C++14 uses of atomic shared_ptr
  // __atomic_val_t<_Tp> is _Tp wrapped in __type_identity_t, so the
  // value parameters of the non-member functions below are in a
  // non-deduced context: only the atomic<_Tp>* argument drives
  // template argument deduction.
  template<typename _Tp>
    using __atomic_val_t = __type_identity_t<_Tp>;
  // Difference type of atomic<_Tp> (as declared by the relevant
  // specialization); dependent, hence likewise not deduced.
  template<typename _Tp>
    using __atomic_diff_t = typename atomic<_Tp>::difference_type;
1182
  // [atomics.nonmembers] Non-member functions.
  // Function templates generally applicable to atomic types.

  /// True if operations on *__a are lock-free.
  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  /// @overload for volatile atomic objects.
  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
    { return __a->is_lock_free(); }

  /// Initialize *__a with value __i; implemented here as a relaxed
  /// store, so it must not race with other operations on *__a.
  template<typename _ITp>
    inline void
    atomic_init(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { __a->store(__i, memory_order_relaxed); }

  /// @overload for volatile atomic objects.
  template<typename _ITp>
    inline void
    atomic_init(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
    { __a->store(__i, memory_order_relaxed); }
1204
  // Non-member store/load/exchange/compare-exchange with explicit
  // memory ordering.  Each forwards to the corresponding member
  // function; a volatile overload is provided for each.

  /// Atomic store with explicit memory order.
  template<typename _ITp>
    inline void
    atomic_store_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
			  memory_order __m) noexcept
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline void
    atomic_store_explicit(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
			  memory_order __m) noexcept
    { __a->store(__i, __m); }

  /// Atomic load with explicit memory order.
  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const volatile atomic<_ITp>* __a,
			 memory_order __m) noexcept
    { return __a->load(__m); }

  /// Atomic exchange with explicit memory order; returns the
  /// previous value.
  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i,
			     memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(volatile atomic<_ITp>* __a,
			     __atomic_val_t<_ITp> __i,
			     memory_order __m) noexcept
    { return __a->exchange(__i, __m); }

  /// Weak compare-and-exchange (may fail spuriously) with separate
  /// success (__m1) and failure (__m2) orderings.  *__i1 is the
  /// expected value, __i2 the desired value.
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
					  __atomic_val_t<_ITp>* __i1,
					  __atomic_val_t<_ITp> __i2,
					  memory_order __m1,
					  memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
					  __atomic_val_t<_ITp>* __i1,
					  __atomic_val_t<_ITp> __i2,
					  memory_order __m1,
					  memory_order __m2) noexcept
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  /// Strong compare-and-exchange (no spurious failure) with separate
  /// success (__m1) and failure (__m2) orderings.
  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
					    __atomic_val_t<_ITp>* __i1,
					    __atomic_val_t<_ITp> __i2,
					    memory_order __m1,
					    memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
					    __atomic_val_t<_ITp>* __i1,
					    __atomic_val_t<_ITp> __i2,
					    memory_order __m1,
					    memory_order __m2) noexcept
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
1276
1277
1278 template<typename _ITp>
1279 inline void
1280 atomic_store(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1281 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
1282
1283 template<typename _ITp>
1284 inline void
1285 atomic_store(volatile atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1286 { atomic_store_explicit(__a, __i, memory_order_seq_cst); }
1287
1288 template<typename _ITp>
1289 inline _ITp
1290 atomic_load(const atomic<_ITp>* __a) noexcept
1291 { return atomic_load_explicit(__a, memory_order_seq_cst); }
1292
1293 template<typename _ITp>
1294 inline _ITp
1295 atomic_load(const volatile atomic<_ITp>* __a) noexcept
1296 { return atomic_load_explicit(__a, memory_order_seq_cst); }
1297
1298 template<typename _ITp>
1299 inline _ITp
1300 atomic_exchange(atomic<_ITp>* __a, __atomic_val_t<_ITp> __i) noexcept
1301 { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1302
1303 template<typename _ITp>
1304 inline _ITp
1305 atomic_exchange(volatile atomic<_ITp>* __a,
1306 __atomic_val_t<_ITp> __i) noexcept
1307 { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
1308
1309 template<typename _ITp>
1310 inline bool
1311 atomic_compare_exchange_weak(atomic<_ITp>* __a,
1312 __atomic_val_t<_ITp>* __i1,
1313 __atomic_val_t<_ITp> __i2) noexcept
1314 {
1315 return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
1316 memory_order_seq_cst,
1317 memory_order_seq_cst);
1318 }
1319
1320 template<typename _ITp>
1321 inline bool
1322 atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
1323 __atomic_val_t<_ITp>* __i1,
1324 __atomic_val_t<_ITp> __i2) noexcept
1325 {
1326 return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
1327 memory_order_seq_cst,
1328 memory_order_seq_cst);
1329 }
1330
1331 template<typename _ITp>
1332 inline bool
1333 atomic_compare_exchange_strong(atomic<_ITp>* __a,
1334 __atomic_val_t<_ITp>* __i1,
1335 __atomic_val_t<_ITp> __i2) noexcept
1336 {
1337 return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
1338 memory_order_seq_cst,
1339 memory_order_seq_cst);
1340 }
1341
1342 template<typename _ITp>
1343 inline bool
1344 atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
1345 __atomic_val_t<_ITp>* __i1,
1346 __atomic_val_t<_ITp> __i2) noexcept
1347 {
1348 return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
1349 memory_order_seq_cst,
1350 memory_order_seq_cst);
1351 }
1352
  // Function templates for atomic_integral and atomic_pointer operations only.
  // Some operations (and, or, xor) are only available for atomic integrals,
  // which is implemented by taking a parameter of type __atomic_base<_ITp>*.

  /// Atomic fetch-add with explicit memory order; returns the
  /// previous value.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(atomic<_ITp>* __a,
			      __atomic_diff_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(volatile atomic<_ITp>* __a,
			      __atomic_diff_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_add(__i, __m); }

  /// Atomic fetch-subtract with explicit memory order.
  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(atomic<_ITp>* __a,
			      __atomic_diff_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(volatile atomic<_ITp>* __a,
			      __atomic_diff_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_sub(__i, __m); }

  /// Atomic fetch-AND with explicit memory order (integral
  /// atomics only, hence the __atomic_base parameter).
  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(__atomic_base<_ITp>* __a,
			      __atomic_val_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a,
			      __atomic_val_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_and(__i, __m); }

  /// Atomic fetch-OR with explicit memory order (integral atomics only).
  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(__atomic_base<_ITp>* __a,
			     __atomic_val_t<_ITp> __i,
			     memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a,
			     __atomic_val_t<_ITp> __i,
			     memory_order __m) noexcept
    { return __a->fetch_or(__i, __m); }

  /// Atomic fetch-XOR with explicit memory order (integral atomics only).
  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a,
			      __atomic_val_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a,
			      __atomic_val_t<_ITp> __i,
			      memory_order __m) noexcept
    { return __a->fetch_xor(__i, __m); }
1426
1427 template<typename _ITp>
1428 inline _ITp
1429 atomic_fetch_add(atomic<_ITp>* __a,
1430 __atomic_diff_t<_ITp> __i) noexcept
1431 { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1432
1433 template<typename _ITp>
1434 inline _ITp
1435 atomic_fetch_add(volatile atomic<_ITp>* __a,
1436 __atomic_diff_t<_ITp> __i) noexcept
1437 { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
1438
1439 template<typename _ITp>
1440 inline _ITp
1441 atomic_fetch_sub(atomic<_ITp>* __a,
1442 __atomic_diff_t<_ITp> __i) noexcept
1443 { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
1444
1445 template<typename _ITp>
1446 inline _ITp
1447 atomic_fetch_sub(volatile atomic<_ITp>* __a,
1448 __atomic_diff_t<_ITp> __i) noexcept
1449 { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
1450
1451 template<typename _ITp>
1452 inline _ITp
1453 atomic_fetch_and(__atomic_base<_ITp>* __a,
1454 __atomic_val_t<_ITp> __i) noexcept
1455 { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1456
1457 template<typename _ITp>
1458 inline _ITp
1459 atomic_fetch_and(volatile __atomic_base<_ITp>* __a,
1460 __atomic_val_t<_ITp> __i) noexcept
1461 { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
1462
1463 template<typename _ITp>
1464 inline _ITp
1465 atomic_fetch_or(__atomic_base<_ITp>* __a,
1466 __atomic_val_t<_ITp> __i) noexcept
1467 { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1468
1469 template<typename _ITp>
1470 inline _ITp
1471 atomic_fetch_or(volatile __atomic_base<_ITp>* __a,
1472 __atomic_val_t<_ITp> __i) noexcept
1473 { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
1474
1475 template<typename _ITp>
1476 inline _ITp
1477 atomic_fetch_xor(__atomic_base<_ITp>* __a,
1478 __atomic_val_t<_ITp> __i) noexcept
1479 { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
1480
1481 template<typename _ITp>
1482 inline _ITp
1483 atomic_fetch_xor(volatile __atomic_base<_ITp>* __a,
1484 __atomic_val_t<_ITp> __i) noexcept
1485 { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
1486
1487#if __cplusplus > 201703L
1488#define __cpp_lib_atomic_float 201711L
  /// Explicit specialization of std::atomic for float (C++20
  /// __cpp_lib_atomic_float); operations come from __atomic_float.
  template<>
    struct atomic<float> : __atomic_float<float>
    {
      // Defaulted; initialization semantics are those of __atomic_float.
      atomic() noexcept = default;

      // Implicit conversion from float, usable in constant expressions.
      constexpr
      atomic(float __fp) noexcept : __atomic_float<float>(__fp)
      { }

      // Atomics are not copy-assignable.
      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      // Re-expose assignment from float (defined in __atomic_float,
      // presumably an atomic store -- see <bits/atomic_base.h>).
      using __atomic_float<float>::operator=;
    };
1503
  /// Explicit specialization of std::atomic for double; operations
  /// come from __atomic_float.
  template<>
    struct atomic<double> : __atomic_float<double>
    {
      // Defaulted; initialization semantics are those of __atomic_float.
      atomic() noexcept = default;

      // Implicit conversion from double, usable in constant expressions.
      constexpr
      atomic(double __fp) noexcept : __atomic_float<double>(__fp)
      { }

      // Atomics are not copy-assignable.
      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      // Re-expose assignment from double (defined in __atomic_float).
      using __atomic_float<double>::operator=;
    };
1518
  /// Explicit specialization of std::atomic for long double;
  /// operations come from __atomic_float.
  template<>
    struct atomic<long double> : __atomic_float<long double>
    {
      // Defaulted; initialization semantics are those of __atomic_float.
      atomic() noexcept = default;

      // Implicit conversion from long double, usable in constant
      // expressions.
      constexpr
      atomic(long double __fp) noexcept : __atomic_float<long double>(__fp)
      { }

      // Atomics are not copy-assignable.
      atomic& operator=(const atomic&) volatile = delete;
      atomic& operator=(const atomic&) = delete;

      // Re-expose assignment from long double (defined in
      // __atomic_float).
      using __atomic_float<long double>::operator=;
    };
1533
1534#define __cpp_lib_atomic_ref 201806L
1535
  /// Class template to provide atomic operations on a non-atomic variable.
  /// Holds a reference to an existing _Tp object; all atomic operations
  /// are inherited from the internal __atomic_ref base.  The referenced
  /// object must outlive every atomic_ref bound to it (lifetime is not
  /// managed here).
  template<typename _Tp>
    struct atomic_ref : __atomic_ref<_Tp>
    {
      // Bind to an existing object.  NOTE(review): any alignment
      // requirement on __t is enforced by __atomic_ref, defined in
      // <bits/atomic_base.h> -- confirm there.
      explicit
      atomic_ref(_Tp& __t) noexcept : __atomic_ref<_Tp>(__t)
      { }

      // Copy-assignment is deleted (it would be ambiguous between
      // rebinding the reference and storing a value)...
      atomic_ref& operator=(const atomic_ref&) = delete;

      // ...but copy-construction is allowed, yielding another
      // atomic_ref to the same object.
      atomic_ref(const atomic_ref&) = default;

      // Assignment from _Tp comes from the base class.
      using __atomic_ref<_Tp>::operator=;
    };
1550
1551#endif // C++2a
1552
1553 /// @} group atomics
1554
1555_GLIBCXX_END_NAMESPACE_VERSION
1556} // namespace
1557
1558#endif // C++11
1559
1560#endif // _GLIBCXX_ATOMIC