//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef _LIBCPP___ATOMIC_ATOMIC_H
#define _LIBCPP___ATOMIC_ATOMIC_H

#include <__atomic/atomic_sync.h>
#include <__atomic/check_memory_order.h>
#include <__atomic/is_always_lock_free.h>
#include <__atomic/memory_order.h>
#include <__atomic/support.h>
#include <__config>
#include <__cstddef/ptrdiff_t.h>
#include <__memory/addressof.h>
#include <__type_traits/enable_if.h>
#include <__type_traits/is_floating_point.h>
#include <__type_traits/is_function.h>
#include <__type_traits/is_integral.h>
#include <__type_traits/is_nothrow_constructible.h>
#include <__type_traits/is_same.h>
#include <__type_traits/is_trivially_copyable.h>
#include <__type_traits/remove_const.h>
#include <__type_traits/remove_pointer.h>
#include <__type_traits/remove_volatile.h>
#include <__utility/forward.h>
#include <cstring>

#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
#  pragma GCC system_header
#endif

_LIBCPP_BEGIN_NAMESPACE_STD

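// __atomic_base is the implementation detail behind std::atomic. The primary
// template below (the `false` case) provides the operations every atomic<T>
// supports: load, store, exchange, compare-exchange and, since C++20,
// wait/notify. The partial specialization for integral types other than bool
// layers the fetch_* arithmetic operations and the corresponding operators on
// top of it. std::atomic<T> then derives from the appropriate __atomic_base
// and adds the public constructors and assignment operators.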
template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
struct __atomic_base // false
{
  mutable __cxx_atomic_impl<_Tp> __a_;

  using value_type = _Tp;

#if _LIBCPP_STD_VER >= 17
  static constexpr bool is_always_lock_free = __libcpp_is_always_lock_free<__cxx_atomic_impl<_Tp> >::__value;
#endif

  _LIBCPP_HIDE_FROM_ABI bool is_lock_free() const volatile _NOEXCEPT {
    return __cxx_atomic_is_lock_free(sizeof(__cxx_atomic_impl<_Tp>));
  }
  _LIBCPP_HIDE_FROM_ABI bool is_lock_free() const _NOEXCEPT {
    return static_cast<__atomic_base const volatile*>(this)->is_lock_free();
  }
  _LIBCPP_HIDE_FROM_ABI void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
      _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) {
    std::__cxx_atomic_store(std::addressof(__a_), __d, __m);
  }
  _LIBCPP_HIDE_FROM_ABI void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
      _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) {
    std::__cxx_atomic_store(std::addressof(__a_), __d, __m);
  }
  _LIBCPP_HIDE_FROM_ABI _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
      _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) {
    return std::__cxx_atomic_load(std::addressof(__a_), __m);
  }
  _LIBCPP_HIDE_FROM_ABI _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
      _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) {
    return std::__cxx_atomic_load(std::addressof(__a_), __m);
  }
  _LIBCPP_HIDE_FROM_ABI operator _Tp() const volatile _NOEXCEPT { return load(); }
  _LIBCPP_HIDE_FROM_ABI operator _Tp() const _NOEXCEPT { return load(); }
  _LIBCPP_HIDE_FROM_ABI _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
    return std::__cxx_atomic_exchange(std::addressof(__a_), __d, __m);
  }
  _LIBCPP_HIDE_FROM_ABI _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
    return std::__cxx_atomic_exchange(std::addressof(__a_), __d, __m);
  }
  _LIBCPP_HIDE_FROM_ABI bool
  compare_exchange_weak(_Tp& __e, _Tp __d, memory_order __s, memory_order __f) volatile _NOEXCEPT
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) {
    return std::__cxx_atomic_compare_exchange_weak(std::addressof(__a_), std::addressof(__e), __d, __s, __f);
  }
  _LIBCPP_HIDE_FROM_ABI bool compare_exchange_weak(_Tp& __e, _Tp __d, memory_order __s, memory_order __f) _NOEXCEPT
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) {
    return std::__cxx_atomic_compare_exchange_weak(std::addressof(__a_), std::addressof(__e), __d, __s, __f);
  }
  _LIBCPP_HIDE_FROM_ABI bool
  compare_exchange_strong(_Tp& __e, _Tp __d, memory_order __s, memory_order __f) volatile _NOEXCEPT
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) {
    return std::__cxx_atomic_compare_exchange_strong(std::addressof(__a_), std::addressof(__e), __d, __s, __f);
  }
  _LIBCPP_HIDE_FROM_ABI bool compare_exchange_strong(_Tp& __e, _Tp __d, memory_order __s, memory_order __f) _NOEXCEPT
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) {
    return std::__cxx_atomic_compare_exchange_strong(std::addressof(__a_), std::addressof(__e), __d, __s, __f);
  }
  _LIBCPP_HIDE_FROM_ABI bool
  compare_exchange_weak(_Tp& __e, _Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
    return std::__cxx_atomic_compare_exchange_weak(std::addressof(__a_), std::addressof(__e), __d, __m, __m);
  }
  _LIBCPP_HIDE_FROM_ABI bool
  compare_exchange_weak(_Tp& __e, _Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
    return std::__cxx_atomic_compare_exchange_weak(std::addressof(__a_), std::addressof(__e), __d, __m, __m);
  }
  _LIBCPP_HIDE_FROM_ABI bool
  compare_exchange_strong(_Tp& __e, _Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
    return std::__cxx_atomic_compare_exchange_strong(std::addressof(__a_), std::addressof(__e), __d, __m, __m);
  }
  _LIBCPP_HIDE_FROM_ABI bool
  compare_exchange_strong(_Tp& __e, _Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
    return std::__cxx_atomic_compare_exchange_strong(std::addressof(__a_), std::addressof(__e), __d, __m, __m);
  }

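  // Since C++20, atomics support blocking wait() and the matching
  // notify_one()/notify_all(). They are implemented on top of the
  // __atomic_wait/__atomic_notify_* helpers from <__atomic/atomic_sync.h>,
  // which locate the contention state for this object through the
  // __atomic_waitable_traits specialization defined further below.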
#if _LIBCPP_STD_VER >= 20
  _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void wait(_Tp __v, memory_order __m = memory_order_seq_cst) const
      volatile _NOEXCEPT {
    std::__atomic_wait(*this, __v, __m);
  }
  _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void
  wait(_Tp __v, memory_order __m = memory_order_seq_cst) const _NOEXCEPT {
    std::__atomic_wait(*this, __v, __m);
  }
  _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void notify_one() volatile _NOEXCEPT {
    std::__atomic_notify_one(*this);
  }
  _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void notify_one() _NOEXCEPT { std::__atomic_notify_one(*this); }
  _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void notify_all() volatile _NOEXCEPT {
    std::__atomic_notify_all(*this);
  }
  _LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void notify_all() _NOEXCEPT { std::__atomic_notify_all(*this); }
#endif // _LIBCPP_STD_VER >= 20

#if _LIBCPP_STD_VER >= 20
  _LIBCPP_HIDE_FROM_ABI constexpr __atomic_base() noexcept(is_nothrow_default_constructible_v<_Tp>) : __a_(_Tp()) {}
#else
  _LIBCPP_HIDE_FROM_ABI __atomic_base() _NOEXCEPT = default;
#endif

  _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}

  __atomic_base(const __atomic_base&) = delete;
};

// atomic<Integral>

template <class _Tp>
struct __atomic_base<_Tp, true> : public __atomic_base<_Tp, false> {
  using __base _LIBCPP_NODEBUG = __atomic_base<_Tp, false>;

  using difference_type = typename __base::value_type;

  _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR_SINCE_CXX20 __atomic_base() _NOEXCEPT = default;

  _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}

  _LIBCPP_HIDE_FROM_ABI _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
    return std::__cxx_atomic_fetch_add(std::addressof(this->__a_), __op, __m);
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
    return std::__cxx_atomic_fetch_add(std::addressof(this->__a_), __op, __m);
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
    return std::__cxx_atomic_fetch_sub(std::addressof(this->__a_), __op, __m);
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
    return std::__cxx_atomic_fetch_sub(std::addressof(this->__a_), __op, __m);
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
    return std::__cxx_atomic_fetch_and(std::addressof(this->__a_), __op, __m);
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
    return std::__cxx_atomic_fetch_and(std::addressof(this->__a_), __op, __m);
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
    return std::__cxx_atomic_fetch_or(std::addressof(this->__a_), __op, __m);
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
    return std::__cxx_atomic_fetch_or(std::addressof(this->__a_), __op, __m);
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
    return std::__cxx_atomic_fetch_xor(std::addressof(this->__a_), __op, __m);
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
    return std::__cxx_atomic_fetch_xor(std::addressof(this->__a_), __op, __m);
  }

  _LIBCPP_HIDE_FROM_ABI _Tp operator++(int) volatile _NOEXCEPT { return fetch_add(_Tp(1)); }
  _LIBCPP_HIDE_FROM_ABI _Tp operator++(int) _NOEXCEPT { return fetch_add(_Tp(1)); }
  _LIBCPP_HIDE_FROM_ABI _Tp operator--(int) volatile _NOEXCEPT { return fetch_sub(_Tp(1)); }
  _LIBCPP_HIDE_FROM_ABI _Tp operator--(int) _NOEXCEPT { return fetch_sub(_Tp(1)); }
  _LIBCPP_HIDE_FROM_ABI _Tp operator++() volatile _NOEXCEPT { return fetch_add(_Tp(1)) + _Tp(1); }
  _LIBCPP_HIDE_FROM_ABI _Tp operator++() _NOEXCEPT { return fetch_add(_Tp(1)) + _Tp(1); }
  _LIBCPP_HIDE_FROM_ABI _Tp operator--() volatile _NOEXCEPT { return fetch_sub(_Tp(1)) - _Tp(1); }
  _LIBCPP_HIDE_FROM_ABI _Tp operator--() _NOEXCEPT { return fetch_sub(_Tp(1)) - _Tp(1); }
  _LIBCPP_HIDE_FROM_ABI _Tp operator+=(_Tp __op) volatile _NOEXCEPT { return fetch_add(__op) + __op; }
  _LIBCPP_HIDE_FROM_ABI _Tp operator+=(_Tp __op) _NOEXCEPT { return fetch_add(__op) + __op; }
  _LIBCPP_HIDE_FROM_ABI _Tp operator-=(_Tp __op) volatile _NOEXCEPT { return fetch_sub(__op) - __op; }
  _LIBCPP_HIDE_FROM_ABI _Tp operator-=(_Tp __op) _NOEXCEPT { return fetch_sub(__op) - __op; }
  _LIBCPP_HIDE_FROM_ABI _Tp operator&=(_Tp __op) volatile _NOEXCEPT { return fetch_and(__op) & __op; }
  _LIBCPP_HIDE_FROM_ABI _Tp operator&=(_Tp __op) _NOEXCEPT { return fetch_and(__op) & __op; }
  _LIBCPP_HIDE_FROM_ABI _Tp operator|=(_Tp __op) volatile _NOEXCEPT { return fetch_or(__op) | __op; }
  _LIBCPP_HIDE_FROM_ABI _Tp operator|=(_Tp __op) _NOEXCEPT { return fetch_or(__op) | __op; }
  _LIBCPP_HIDE_FROM_ABI _Tp operator^=(_Tp __op) volatile _NOEXCEPT { return fetch_xor(__op) ^ __op; }
  _LIBCPP_HIDE_FROM_ABI _Tp operator^=(_Tp __op) _NOEXCEPT { return fetch_xor(__op) ^ __op; }
};

// Here we need _IsIntegral because the default template argument is not enough.
// For example, __atomic_base<int> is __atomic_base<int, true>, which inherits from
// __atomic_base<int, false>, and the caller of the wait function is
// __atomic_base<int, false>. So specializing __atomic_base<_Tp> alone does not work.
template <class _Tp, bool _IsIntegral>
struct __atomic_waitable_traits<__atomic_base<_Tp, _IsIntegral> > {
  static _LIBCPP_HIDE_FROM_ABI _Tp __atomic_load(const __atomic_base<_Tp, _IsIntegral>& __a, memory_order __order) {
    return __a.load(__order);
  }

  static _LIBCPP_HIDE_FROM_ABI _Tp
  __atomic_load(const volatile __atomic_base<_Tp, _IsIntegral>& __this, memory_order __order) {
    return __this.load(__order);
  }

  static _LIBCPP_HIDE_FROM_ABI const __cxx_atomic_impl<_Tp>*
  __atomic_contention_address(const __atomic_base<_Tp, _IsIntegral>& __a) {
    return std::addressof(__a.__a_);
  }

  static _LIBCPP_HIDE_FROM_ABI const volatile __cxx_atomic_impl<_Tp>*
  __atomic_contention_address(const volatile __atomic_base<_Tp, _IsIntegral>& __this) {
    return std::addressof(__this.__a_);
  }
};

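// __check_atomic_mandates funnels the Mandates check through a nested typedef,
// so that instantiating atomic<T> with a type that is not trivially copyable
// fires this static_assert (with a readable message) before the rest of the
// base-class machinery is instantiated.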
template <typename _Tp>
struct __check_atomic_mandates {
  using type _LIBCPP_NODEBUG = _Tp;
  static_assert(is_trivially_copyable<_Tp>::value, "std::atomic<T> requires that 'T' be a trivially copyable type");
};

template <class _Tp>
struct atomic : public __atomic_base<typename __check_atomic_mandates<_Tp>::type> {
  using __base _LIBCPP_NODEBUG = __atomic_base<_Tp>;

#if _LIBCPP_STD_VER >= 20
  _LIBCPP_HIDE_FROM_ABI atomic() = default;
#else
  _LIBCPP_HIDE_FROM_ABI atomic() _NOEXCEPT = default;
#endif

  _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}

  _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __d) volatile _NOEXCEPT {
    __base::store(__d);
    return __d;
  }
  _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __d) _NOEXCEPT {
    __base::store(__d);
    return __d;
  }

  atomic& operator=(const atomic&) = delete;
  atomic& operator=(const atomic&) volatile = delete;
};

// atomic<T*>

template <class _Tp>
struct atomic<_Tp*> : public __atomic_base<_Tp*> {
  using __base _LIBCPP_NODEBUG = __atomic_base<_Tp*>;

  using difference_type = ptrdiff_t;

  _LIBCPP_HIDE_FROM_ABI atomic() _NOEXCEPT = default;

  _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}

  _LIBCPP_HIDE_FROM_ABI _Tp* operator=(_Tp* __d) volatile _NOEXCEPT {
    __base::store(__d);
    return __d;
  }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator=(_Tp* __d) _NOEXCEPT {
    __base::store(__d);
    return __d;
  }

  _LIBCPP_HIDE_FROM_ABI _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
    // __atomic_fetch_add accepts function pointers, guard against them.
    static_assert(!is_function<__remove_pointer_t<_Tp> >::value, "Pointer to function isn't allowed");
    return std::__cxx_atomic_fetch_add(std::addressof(this->__a_), __op, __m);
  }

  _LIBCPP_HIDE_FROM_ABI _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
    // __atomic_fetch_add accepts function pointers, guard against them.
    static_assert(!is_function<__remove_pointer_t<_Tp> >::value, "Pointer to function isn't allowed");
    return std::__cxx_atomic_fetch_add(std::addressof(this->__a_), __op, __m);
  }

  _LIBCPP_HIDE_FROM_ABI _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
    // __atomic_fetch_sub accepts function pointers, guard against them.
    static_assert(!is_function<__remove_pointer_t<_Tp> >::value, "Pointer to function isn't allowed");
    return std::__cxx_atomic_fetch_sub(std::addressof(this->__a_), __op, __m);
  }

  _LIBCPP_HIDE_FROM_ABI _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
    // __atomic_fetch_sub accepts function pointers, guard against them.
    static_assert(!is_function<__remove_pointer_t<_Tp> >::value, "Pointer to function isn't allowed");
    return std::__cxx_atomic_fetch_sub(std::addressof(this->__a_), __op, __m);
  }

  _LIBCPP_HIDE_FROM_ABI _Tp* operator++(int) volatile _NOEXCEPT { return fetch_add(1); }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator++(int) _NOEXCEPT { return fetch_add(1); }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator--(int) volatile _NOEXCEPT { return fetch_sub(1); }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator--(int) _NOEXCEPT { return fetch_sub(1); }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator++() volatile _NOEXCEPT { return fetch_add(1) + 1; }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator++() _NOEXCEPT { return fetch_add(1) + 1; }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator--() volatile _NOEXCEPT { return fetch_sub(1) - 1; }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator--() _NOEXCEPT { return fetch_sub(1) - 1; }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT { return fetch_add(__op) + __op; }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT { return fetch_add(__op) + __op; }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT { return fetch_sub(__op) - __op; }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT { return fetch_sub(__op) - __op; }

  atomic& operator=(const atomic&) = delete;
  atomic& operator=(const atomic&) volatile = delete;
};

template <class _Tp>
struct __atomic_waitable_traits<atomic<_Tp> > : __atomic_waitable_traits<__atomic_base<_Tp> > {};

#if _LIBCPP_STD_VER >= 20
template <class _Tp>
  requires is_floating_point_v<_Tp>
struct atomic<_Tp> : __atomic_base<_Tp> {
private:
  _LIBCPP_HIDE_FROM_ABI static constexpr bool __is_fp80_long_double() {
    // Only x87-fp80 long double has 64-bit mantissa
    return __LDBL_MANT_DIG__ == 64 && std::is_same_v<_Tp, long double>;
  }

  _LIBCPP_HIDE_FROM_ABI static constexpr bool __has_rmw_builtin() {
#  ifndef _LIBCPP_COMPILER_CLANG_BASED
    return false;
#  else
    // The builtin __cxx_atomic_fetch_add errors during compilation for
    // long double on platforms with fp80 format.
    // For more details, see
    // lib/Sema/SemaChecking.cpp function IsAllowedValueType
    // LLVM Parser does not allow atomicrmw with x86_fp80 type.
    // if (ValType->isSpecificBuiltinType(BuiltinType::LongDouble) &&
    //     &Context.getTargetInfo().getLongDoubleFormat() ==
    //         &llvm::APFloat::x87DoubleExtended())
    // For more info
    // https://github.com/llvm/llvm-project/issues/68602
    // https://reviews.llvm.org/D53965
    return !__is_fp80_long_double();
#  endif
  }

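  // __rmw_op implements the floating-point fetch_add/fetch_sub. When the
  // compiler exposes a usable atomic RMW builtin for _Tp it simply forwards to
  // it; otherwise it falls back to a compare_exchange_weak loop: load the
  // current value, compute __operation(old, operand), and retry until the
  // exchange succeeds, returning the previous value.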
  template <class _This, class _Operation, class _BuiltinOp>
  _LIBCPP_HIDE_FROM_ABI static _Tp
  __rmw_op(_This&& __self, _Tp __operand, memory_order __m, _Operation __operation, _BuiltinOp __builtin_op) {
    if constexpr (__has_rmw_builtin()) {
      return __builtin_op(std::addressof(std::forward<_This>(__self).__a_), __operand, __m);
    } else {
      _Tp __old = __self.load(memory_order_relaxed);
      _Tp __new = __operation(__old, __operand);
      while (!__self.compare_exchange_weak(__old, __new, __m, memory_order_relaxed)) {
#  ifdef _LIBCPP_COMPILER_CLANG_BASED
        if constexpr (__is_fp80_long_double()) {
          // https://github.com/llvm/llvm-project/issues/47978
          // clang bug: __old is not updated on failure for atomic<long double>::compare_exchange_weak
          // Note __old = __self.load(memory_order_relaxed) will not work
          std::__cxx_atomic_load_inplace(std::addressof(__self.__a_), std::addressof(__old), memory_order_relaxed);
        }
#  endif
        __new = __operation(__old, __operand);
      }
      return __old;
    }
  }

  template <class _This>
  _LIBCPP_HIDE_FROM_ABI static _Tp __fetch_add(_This&& __self, _Tp __operand, memory_order __m) {
    auto __builtin_op = [](auto __a, auto __builtin_operand, auto __order) {
      return std::__cxx_atomic_fetch_add(__a, __builtin_operand, __order);
    };
    auto __plus = [](auto __a, auto __b) { return __a + __b; };
    return __rmw_op(std::forward<_This>(__self), __operand, __m, __plus, __builtin_op);
  }

  template <class _This>
  _LIBCPP_HIDE_FROM_ABI static _Tp __fetch_sub(_This&& __self, _Tp __operand, memory_order __m) {
    auto __builtin_op = [](auto __a, auto __builtin_operand, auto __order) {
      return std::__cxx_atomic_fetch_sub(__a, __builtin_operand, __order);
    };
    auto __minus = [](auto __a, auto __b) { return __a - __b; };
    return __rmw_op(std::forward<_This>(__self), __operand, __m, __minus, __builtin_op);
  }

public:
  using __base _LIBCPP_NODEBUG = __atomic_base<_Tp>;
  using value_type = _Tp;
  using difference_type = value_type;

  _LIBCPP_HIDE_FROM_ABI constexpr atomic() noexcept = default;
  _LIBCPP_HIDE_FROM_ABI constexpr atomic(_Tp __d) noexcept : __base(__d) {}

  atomic(const atomic&) = delete;
  atomic& operator=(const atomic&) = delete;
  atomic& operator=(const atomic&) volatile = delete;

  _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __d) volatile noexcept
    requires __base::is_always_lock_free
  {
    __base::store(__d);
    return __d;
  }
  _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __d) noexcept {
    __base::store(__d);
    return __d;
  }

  _LIBCPP_HIDE_FROM_ABI _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile noexcept
    requires __base::is_always_lock_free
  {
    return __fetch_add(*this, __op, __m);
  }

  _LIBCPP_HIDE_FROM_ABI _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) noexcept {
    return __fetch_add(*this, __op, __m);
  }

  _LIBCPP_HIDE_FROM_ABI _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile noexcept
    requires __base::is_always_lock_free
  {
    return __fetch_sub(*this, __op, __m);
  }

  _LIBCPP_HIDE_FROM_ABI _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) noexcept {
    return __fetch_sub(*this, __op, __m);
  }

  _LIBCPP_HIDE_FROM_ABI _Tp operator+=(_Tp __op) volatile noexcept
    requires __base::is_always_lock_free
  {
    return fetch_add(__op) + __op;
  }

  _LIBCPP_HIDE_FROM_ABI _Tp operator+=(_Tp __op) noexcept { return fetch_add(__op) + __op; }

  _LIBCPP_HIDE_FROM_ABI _Tp operator-=(_Tp __op) volatile noexcept
    requires __base::is_always_lock_free
  {
    return fetch_sub(__op) - __op;
  }

  _LIBCPP_HIDE_FROM_ABI _Tp operator-=(_Tp __op) noexcept { return fetch_sub(__op) - __op; }
};

#endif // _LIBCPP_STD_VER >= 20

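// Non-member functions ([atomics.nonmembers]). Each free function below simply
// forwards to the corresponding member function on the passed atomic object;
// together they also provide the C-compatible spelling of the atomic API.
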
462// atomic_is_lock_free
463
464template <class _Tp>
465_LIBCPP_HIDE_FROM_ABI bool atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT {
466 return __o->is_lock_free();
467}
468
469template <class _Tp>
470_LIBCPP_HIDE_FROM_ABI bool atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT {
471 return __o->is_lock_free();
472}
473
474// atomic_init
475
476template <class _Tp>
477_LIBCPP_DEPRECATED_IN_CXX20 _LIBCPP_HIDE_FROM_ABI void
478atomic_init(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT {
479 std::__cxx_atomic_init(std::addressof(__o->__a_), __d);
480}
481
482template <class _Tp>
483_LIBCPP_DEPRECATED_IN_CXX20 _LIBCPP_HIDE_FROM_ABI void
484atomic_init(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT {
485 std::__cxx_atomic_init(std::addressof(__o->__a_), __d);
486}
487
488// atomic_store
489
490template <class _Tp>
491_LIBCPP_HIDE_FROM_ABI void atomic_store(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT {
492 __o->store(__d);
493}
494
495template <class _Tp>
496_LIBCPP_HIDE_FROM_ABI void atomic_store(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT {
497 __o->store(__d);
498}
499
500// atomic_store_explicit
501
502template <class _Tp>
503_LIBCPP_HIDE_FROM_ABI void
504atomic_store_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
505 _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) {
506 __o->store(__d, __m);
507}
508
509template <class _Tp>
510_LIBCPP_HIDE_FROM_ABI void
511atomic_store_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
512 _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) {
513 __o->store(__d, __m);
514}
515
516// atomic_load
517
518template <class _Tp>
519_LIBCPP_HIDE_FROM_ABI _Tp atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT {
520 return __o->load();
521}
522
523template <class _Tp>
524_LIBCPP_HIDE_FROM_ABI _Tp atomic_load(const atomic<_Tp>* __o) _NOEXCEPT {
525 return __o->load();
526}
527
528// atomic_load_explicit
529
530template <class _Tp>
531_LIBCPP_HIDE_FROM_ABI _Tp atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
532 _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) {
533 return __o->load(__m);
534}
535
536template <class _Tp>
537_LIBCPP_HIDE_FROM_ABI _Tp atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
538 _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) {
539 return __o->load(__m);
540}
541
542// atomic_exchange
543
544template <class _Tp>
545_LIBCPP_HIDE_FROM_ABI _Tp atomic_exchange(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT {
546 return __o->exchange(__d);
547}
548
549template <class _Tp>
550_LIBCPP_HIDE_FROM_ABI _Tp atomic_exchange(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT {
551 return __o->exchange(__d);
552}
553
554// atomic_exchange_explicit
555
556template <class _Tp>
557_LIBCPP_HIDE_FROM_ABI _Tp
558atomic_exchange_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT {
559 return __o->exchange(__d, __m);
560}
561
562template <class _Tp>
563_LIBCPP_HIDE_FROM_ABI _Tp
564atomic_exchange_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT {
565 return __o->exchange(__d, __m);
566}
567
568// atomic_compare_exchange_weak
569
570template <class _Tp>
571_LIBCPP_HIDE_FROM_ABI bool atomic_compare_exchange_weak(
572 volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT {
573 return __o->compare_exchange_weak(*__e, __d);
574}
575
576template <class _Tp>
577_LIBCPP_HIDE_FROM_ABI bool atomic_compare_exchange_weak(
578 atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT {
579 return __o->compare_exchange_weak(*__e, __d);
580}
581
582// atomic_compare_exchange_strong
583
584template <class _Tp>
585_LIBCPP_HIDE_FROM_ABI bool atomic_compare_exchange_strong(
586 volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT {
587 return __o->compare_exchange_strong(*__e, __d);
588}
589
590template <class _Tp>
591_LIBCPP_HIDE_FROM_ABI bool atomic_compare_exchange_strong(
592 atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT {
593 return __o->compare_exchange_strong(*__e, __d);
594}
595
596// atomic_compare_exchange_weak_explicit
597
598template <class _Tp>
599_LIBCPP_HIDE_FROM_ABI bool atomic_compare_exchange_weak_explicit(
600 volatile atomic<_Tp>* __o,
601 typename atomic<_Tp>::value_type* __e,
602 typename atomic<_Tp>::value_type __d,
603 memory_order __s,
604 memory_order __f) _NOEXCEPT _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) {
605 return __o->compare_exchange_weak(*__e, __d, __s, __f);
606}
607
608template <class _Tp>
609_LIBCPP_HIDE_FROM_ABI bool atomic_compare_exchange_weak_explicit(
610 atomic<_Tp>* __o,
611 typename atomic<_Tp>::value_type* __e,
612 typename atomic<_Tp>::value_type __d,
613 memory_order __s,
614 memory_order __f) _NOEXCEPT _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) {
615 return __o->compare_exchange_weak(*__e, __d, __s, __f);
616}
617
618// atomic_compare_exchange_strong_explicit
619
620template <class _Tp>
621_LIBCPP_HIDE_FROM_ABI bool atomic_compare_exchange_strong_explicit(
622 volatile atomic<_Tp>* __o,
623 typename atomic<_Tp>::value_type* __e,
624 typename atomic<_Tp>::value_type __d,
625 memory_order __s,
626 memory_order __f) _NOEXCEPT _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) {
627 return __o->compare_exchange_strong(*__e, __d, __s, __f);
628}
629
630template <class _Tp>
631_LIBCPP_HIDE_FROM_ABI bool atomic_compare_exchange_strong_explicit(
632 atomic<_Tp>* __o,
633 typename atomic<_Tp>::value_type* __e,
634 typename atomic<_Tp>::value_type __d,
635 memory_order __s,
636 memory_order __f) _NOEXCEPT _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f) {
637 return __o->compare_exchange_strong(*__e, __d, __s, __f);
638}
639
640#if _LIBCPP_STD_VER >= 20
641
642// atomic_wait
643
644template <class _Tp>
645_LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void
646atomic_wait(const volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __v) _NOEXCEPT {
647 return __o->wait(__v);
648}
649
650template <class _Tp>
651_LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void
652atomic_wait(const atomic<_Tp>* __o, typename atomic<_Tp>::value_type __v) _NOEXCEPT {
653 return __o->wait(__v);
654}
655
656// atomic_wait_explicit
657
658template <class _Tp>
659_LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void
660atomic_wait_explicit(const volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __v, memory_order __m) _NOEXCEPT
661 _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) {
662 return __o->wait(__v, __m);
663}
664
665template <class _Tp>
666_LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void
667atomic_wait_explicit(const atomic<_Tp>* __o, typename atomic<_Tp>::value_type __v, memory_order __m) _NOEXCEPT
668 _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) {
669 return __o->wait(__v, __m);
670}
671
672// atomic_notify_one
673
674template <class _Tp>
675_LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void atomic_notify_one(volatile atomic<_Tp>* __o) _NOEXCEPT {
676 __o->notify_one();
677}
678template <class _Tp>
679_LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void atomic_notify_one(atomic<_Tp>* __o) _NOEXCEPT {
680 __o->notify_one();
681}
682
683// atomic_notify_all
684
685template <class _Tp>
686_LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void atomic_notify_all(volatile atomic<_Tp>* __o) _NOEXCEPT {
687 __o->notify_all();
688}
689template <class _Tp>
690_LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void atomic_notify_all(atomic<_Tp>* __o) _NOEXCEPT {
691 __o->notify_all();
692}
693
694#endif // _LIBCPP_STD_VER >= 20
695
696// atomic_fetch_add
697
698template <class _Tp>
699_LIBCPP_HIDE_FROM_ABI _Tp
700atomic_fetch_add(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT {
701 return __o->fetch_add(__op);
702}
703
704template <class _Tp>
705_LIBCPP_HIDE_FROM_ABI _Tp atomic_fetch_add(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT {
706 return __o->fetch_add(__op);
707}
708
709// atomic_fetch_add_explicit
710
711template <class _Tp>
712_LIBCPP_HIDE_FROM_ABI _Tp atomic_fetch_add_explicit(
713 volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT {
714 return __o->fetch_add(__op, __m);
715}
716
717template <class _Tp>
718_LIBCPP_HIDE_FROM_ABI _Tp
719atomic_fetch_add_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT {
720 return __o->fetch_add(__op, __m);
721}
722
723// atomic_fetch_sub
724
725template <class _Tp>
726_LIBCPP_HIDE_FROM_ABI _Tp
727atomic_fetch_sub(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT {
728 return __o->fetch_sub(__op);
729}
730
731template <class _Tp>
732_LIBCPP_HIDE_FROM_ABI _Tp atomic_fetch_sub(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT {
733 return __o->fetch_sub(__op);
734}
735
736// atomic_fetch_sub_explicit
737
738template <class _Tp>
739_LIBCPP_HIDE_FROM_ABI _Tp atomic_fetch_sub_explicit(
740 volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT {
741 return __o->fetch_sub(__op, __m);
742}
743
744template <class _Tp>
745_LIBCPP_HIDE_FROM_ABI _Tp
746atomic_fetch_sub_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT {
747 return __o->fetch_sub(__op, __m);
748}
749
750// atomic_fetch_and
751
752template <class _Tp, __enable_if_t<is_integral<_Tp>::value && !is_same<_Tp, bool>::value, int> = 0>
753_LIBCPP_HIDE_FROM_ABI _Tp atomic_fetch_and(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT {
754 return __o->fetch_and(__op);
755}
756
757template <class _Tp, __enable_if_t<is_integral<_Tp>::value && !is_same<_Tp, bool>::value, int> = 0>
758_LIBCPP_HIDE_FROM_ABI _Tp atomic_fetch_and(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT {
759 return __o->fetch_and(__op);
760}
761
762// atomic_fetch_and_explicit
763
764template <class _Tp, __enable_if_t<is_integral<_Tp>::value && !is_same<_Tp, bool>::value, int> = 0>
765_LIBCPP_HIDE_FROM_ABI _Tp atomic_fetch_and_explicit(
766 volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT {
767 return __o->fetch_and(__op, __m);
768}
769
770template <class _Tp, __enable_if_t<is_integral<_Tp>::value && !is_same<_Tp, bool>::value, int> = 0>
771_LIBCPP_HIDE_FROM_ABI _Tp
772atomic_fetch_and_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT {
773 return __o->fetch_and(__op, __m);
774}
775
776// atomic_fetch_or
777
778template <class _Tp, __enable_if_t<is_integral<_Tp>::value && !is_same<_Tp, bool>::value, int> = 0>
779_LIBCPP_HIDE_FROM_ABI _Tp atomic_fetch_or(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT {
780 return __o->fetch_or(__op);
781}
782
783template <class _Tp, __enable_if_t<is_integral<_Tp>::value && !is_same<_Tp, bool>::value, int> = 0>
784_LIBCPP_HIDE_FROM_ABI _Tp atomic_fetch_or(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT {
785 return __o->fetch_or(__op);
786}
787
788// atomic_fetch_or_explicit
789
790template <class _Tp, __enable_if_t<is_integral<_Tp>::value && !is_same<_Tp, bool>::value, int> = 0>
791_LIBCPP_HIDE_FROM_ABI _Tp
792atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT {
793 return __o->fetch_or(__op, __m);
794}
795
796template <class _Tp, __enable_if_t<is_integral<_Tp>::value && !is_same<_Tp, bool>::value, int> = 0>
797_LIBCPP_HIDE_FROM_ABI _Tp
798atomic_fetch_or_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT {
799 return __o->fetch_or(__op, __m);
800}
801
802// atomic_fetch_xor
803
804template <class _Tp, __enable_if_t<is_integral<_Tp>::value && !is_same<_Tp, bool>::value, int> = 0>
805_LIBCPP_HIDE_FROM_ABI _Tp atomic_fetch_xor(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT {
806 return __o->fetch_xor(__op);
807}
808
809template <class _Tp, __enable_if_t<is_integral<_Tp>::value && !is_same<_Tp, bool>::value, int> = 0>
810_LIBCPP_HIDE_FROM_ABI _Tp atomic_fetch_xor(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT {
811 return __o->fetch_xor(__op);
812}
813
814// atomic_fetch_xor_explicit
815
816template <class _Tp, __enable_if_t<is_integral<_Tp>::value && !is_same<_Tp, bool>::value, int> = 0>
817_LIBCPP_HIDE_FROM_ABI _Tp atomic_fetch_xor_explicit(
818 volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT {
819 return __o->fetch_xor(__op, __m);
820}
821
822template <class _Tp, __enable_if_t<is_integral<_Tp>::value && !is_same<_Tp, bool>::value, int> = 0>
823_LIBCPP_HIDE_FROM_ABI _Tp
824atomic_fetch_xor_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT {
825 return __o->fetch_xor(__op, __m);
826}
827
828_LIBCPP_END_NAMESPACE_STD
829
830#endif // _LIBCPP___ATOMIC_ATOMIC_H
831