//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef _LIBCPP___ATOMIC_ATOMIC_SYNC_H
#define _LIBCPP___ATOMIC_ATOMIC_SYNC_H

#include <__atomic/atomic_waitable_traits.h>
#include <__atomic/contention_t.h>
#include <__atomic/memory_order.h>
#include <__chrono/duration.h>
#include <__config>
#include <__memory/addressof.h>
#include <__thread/poll_with_backoff.h>
#include <__type_traits/decay.h>
#include <cstring>

#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
#  pragma GCC system_header
#endif

_LIBCPP_BEGIN_NAMESPACE_STD

#if _LIBCPP_STD_VER >= 20
#  if _LIBCPP_HAS_THREADS

#    if !_LIBCPP_AVAILABILITY_HAS_NEW_SYNC

// old dylib interface kept for backwards compatibility
_LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_one(void const volatile*) _NOEXCEPT;
_LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_all(void const volatile*) _NOEXCEPT;
_LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t __libcpp_atomic_monitor(void const volatile*) _NOEXCEPT;
_LIBCPP_EXPORTED_FROM_ABI void __libcpp_atomic_wait(void const volatile*, __cxx_contention_t) _NOEXCEPT;

_LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_one(__cxx_atomic_contention_t const volatile*) _NOEXCEPT;
_LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_all(__cxx_atomic_contention_t const volatile*) _NOEXCEPT;
_LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t
__libcpp_atomic_monitor(__cxx_atomic_contention_t const volatile*) _NOEXCEPT;
_LIBCPP_EXPORTED_FROM_ABI void
__libcpp_atomic_wait(__cxx_atomic_contention_t const volatile*, __cxx_contention_t) _NOEXCEPT;
#    endif // !_LIBCPP_AVAILABILITY_HAS_NEW_SYNC

// new dylib interface
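//
// Depending on the waitable's value type, waiting takes one of two forms: when
// __has_native_atomic_wait is true for the value type, the platform's native wait primitive
// monitors the atomic object itself; otherwise the wait goes through a global table of
// contention states keyed by the object's address. The functions below provide both flavours.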

// return the global contention state's current value for the address
_LIBCPP_AVAILABILITY_NEW_SYNC _LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t
__atomic_monitor_global(void const* __address) _NOEXCEPT;

// wait on the global contention state to be changed from the given value for the address
_LIBCPP_AVAILABILITY_NEW_SYNC _LIBCPP_EXPORTED_FROM_ABI void
__atomic_wait_global_table(void const* __address, __cxx_contention_t __monitor_value) _NOEXCEPT;

// notify one waiter waiting on the global contention state for the address
_LIBCPP_AVAILABILITY_NEW_SYNC _LIBCPP_EXPORTED_FROM_ABI void __atomic_notify_one_global_table(void const*) _NOEXCEPT;

// notify all waiters waiting on the global contention state for the address
_LIBCPP_AVAILABILITY_NEW_SYNC _LIBCPP_EXPORTED_FROM_ABI void __atomic_notify_all_global_table(void const*) _NOEXCEPT;

// wait on the address directly with the native platform wait
template <std::size_t _Size>
_LIBCPP_AVAILABILITY_NEW_SYNC _LIBCPP_EXPORTED_FROM_ABI void
__atomic_wait_native(void const* __address, void const* __old_value) _NOEXCEPT;

// notify one waiter waiting on the address directly with the native platform wait
template <std::size_t _Size>
_LIBCPP_AVAILABILITY_NEW_SYNC _LIBCPP_EXPORTED_FROM_ABI void __atomic_notify_one_native(const void*) _NOEXCEPT;

// notify all waiters waiting on the address directly with the native platform wait
template <std::size_t _Size>
_LIBCPP_AVAILABILITY_NEW_SYNC _LIBCPP_EXPORTED_FROM_ABI void __atomic_notify_all_native(const void*) _NOEXCEPT;

#    if _LIBCPP_AVAILABILITY_HAS_NEW_SYNC

template <class _AtomicWaitable, class _Poll>
struct __atomic_wait_backoff_impl {
  const _AtomicWaitable& __a_;
  _Poll __poll_;
  memory_order __order_;

  using __waitable_traits _LIBCPP_NODEBUG = __atomic_waitable_traits<__decay_t<_AtomicWaitable> >;
  using __value_type _LIBCPP_NODEBUG = typename __waitable_traits::__value_type;

  _LIBCPP_HIDE_FROM_ABI __backoff_results operator()(chrono::nanoseconds __elapsed) const {
    if (__elapsed > chrono::microseconds(4)) {
      auto __contention_address = const_cast<const void*>(
          static_cast<const volatile void*>(__waitable_traits::__atomic_contention_address(__a_)));

      if constexpr (__has_native_atomic_wait<__value_type>) {
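        // Native platform wait: re-load the value and poll once more before blocking; the freshly
        // loaded value is then passed to __atomic_wait_native as the "old" value, so the wait
        // returns if the atomic no longer holds that value.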
        auto __atomic_value = __waitable_traits::__atomic_load(__a_, __order_);
        if (__poll_(__atomic_value))
          return __backoff_results::__poll_success;
        std::__atomic_wait_native<sizeof(__value_type)>(__contention_address, std::addressof(__atomic_value));
      } else {
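        // Global-table wait: take a snapshot of the contention state first, then re-load and poll
        // the value. A notification issued after the snapshot changes the contention state, so
        // __atomic_wait_global_table returns instead of blocking on a stale monitor value.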
        __cxx_contention_t __monitor_val = std::__atomic_monitor_global(__contention_address);
        auto __atomic_value = __waitable_traits::__atomic_load(__a_, __order_);
        if (__poll_(__atomic_value))
          return __backoff_results::__poll_success;
        std::__atomic_wait_global_table(__contention_address, __monitor_val);
      }
    } else {
    } // poll
    return __backoff_results::__continue_poll;
  }
};

#    else // _LIBCPP_AVAILABILITY_HAS_NEW_SYNC

template <class _AtomicWaitable, class _Poll>
struct __atomic_wait_backoff_impl {
  const _AtomicWaitable& __a_;
  _Poll __poll_;
  memory_order __order_;

  using __waitable_traits _LIBCPP_NODEBUG = __atomic_waitable_traits<__decay_t<_AtomicWaitable> >;

  _LIBCPP_HIDE_FROM_ABI bool
  __update_monitor_val_and_poll(__cxx_atomic_contention_t const volatile*, __cxx_contention_t& __monitor_val) const {
    // In case the contention type happens to be __cxx_atomic_contention_t, i.e. __cxx_atomic_impl<int64_t>,
    // the platform wait directly monitors the atomic value itself.
    // `__poll_` takes the current value of the atomic as an in-out argument
    // and may modify it. After it returns, `__monitor_val` holds a value
    // which can be safely waited on by `std::__libcpp_atomic_wait` without any
    // ABA style issues.
    __monitor_val = __waitable_traits::__atomic_load(__a_, __order_);
    return __poll_(__monitor_val);
  }

  _LIBCPP_HIDE_FROM_ABI bool
  __update_monitor_val_and_poll(void const volatile* __contention_address, __cxx_contention_t& __monitor_val) const {
    // In case the contention type is anything else, the platform wait monitors a __cxx_atomic_contention_t
    // from the global pool; the monitor value comes from __libcpp_atomic_monitor.
    __monitor_val = std::__libcpp_atomic_monitor(__contention_address);
    auto __current_val = __waitable_traits::__atomic_load(__a_, __order_);
    return __poll_(__current_val);
  }

  _LIBCPP_HIDE_FROM_ABI __backoff_results operator()(chrono::nanoseconds __elapsed) const {
    if (__elapsed > chrono::microseconds(4)) {
      auto __contention_address = __waitable_traits::__atomic_contention_address(__a_);
      __cxx_contention_t __monitor_val;
      if (__update_monitor_val_and_poll(__contention_address, __monitor_val))
        return __backoff_results::__poll_success;
      std::__libcpp_atomic_wait(__contention_address, __monitor_val);
    } else {
    } // poll
    return __backoff_results::__continue_poll;
  }
};

#    endif // _LIBCPP_AVAILABILITY_HAS_NEW_SYNC

// The semantics of this function are similar to `atomic`'s
// `.wait(T old, std::memory_order order)`, but instead of having a hardcoded
// predicate (is the loaded value unequal to `old`?), the predicate function is
// specified as an argument. The loaded value is given as an in-out argument to
// the predicate. If the predicate function returns `true`,
// `__atomic_wait_unless` will return. If the predicate function returns
// `false`, it must set the argument to its current understanding of the atomic
// value. The predicate function must not return `false` spuriously.
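//
// For example, `std::__atomic_wait` below builds the usual "wait while equal to the old value"
// behaviour on top of this function:
//
//   std::__atomic_wait_unless(__a, __order, [&](_Tp const& __current) {
//     return !std::__cxx_nonatomic_compare_equal(__current, __val);
//   });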
template <class _AtomicWaitable, class _Poll>
_LIBCPP_HIDE_FROM_ABI void __atomic_wait_unless(const _AtomicWaitable& __a, memory_order __order, _Poll&& __poll) {
  static_assert(__atomic_waitable<_AtomicWaitable>);
  __atomic_wait_backoff_impl<_AtomicWaitable, __decay_t<_Poll> > __backoff_fn = {__a, __poll, __order};
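  // __libcpp_thread_poll_with_backoff repeatedly invokes the poll lambda below; while it keeps
  // returning false, the backoff functor above is called with the elapsed time and, once roughly
  // 4 microseconds have passed, it blocks instead of letting the caller keep spinning.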
  std::__libcpp_thread_poll_with_backoff(
      /* poll */
      [&]() {
        auto __current_val = __atomic_waitable_traits<__decay_t<_AtomicWaitable> >::__atomic_load(__a, __order);
        return __poll(__current_val);
      },
      /* backoff */ __backoff_fn);
}

#    if _LIBCPP_AVAILABILITY_HAS_NEW_SYNC

template <class _AtomicWaitable>
_LIBCPP_HIDE_FROM_ABI void __atomic_notify_one(const _AtomicWaitable& __a) {
  static_assert(__atomic_waitable<_AtomicWaitable>);
  using __value_type _LIBCPP_NODEBUG = typename __atomic_waitable_traits<__decay_t<_AtomicWaitable> >::__value_type;
  using __waitable_traits _LIBCPP_NODEBUG = __atomic_waitable_traits<__decay_t<_AtomicWaitable> >;
  auto __contention_address =
      const_cast<const void*>(static_cast<const volatile void*>(__waitable_traits::__atomic_contention_address(__a)));
  if constexpr (__has_native_atomic_wait<__value_type>) {
    std::__atomic_notify_one_native<sizeof(__value_type)>(__contention_address);
  } else {
    std::__atomic_notify_one_global_table(__contention_address);
  }
}

template <class _AtomicWaitable>
_LIBCPP_HIDE_FROM_ABI void __atomic_notify_all(const _AtomicWaitable& __a) {
  static_assert(__atomic_waitable<_AtomicWaitable>);
  using __value_type _LIBCPP_NODEBUG = typename __atomic_waitable_traits<__decay_t<_AtomicWaitable> >::__value_type;
  using __waitable_traits _LIBCPP_NODEBUG = __atomic_waitable_traits<__decay_t<_AtomicWaitable> >;
  auto __contention_address =
      const_cast<const void*>(static_cast<const volatile void*>(__waitable_traits::__atomic_contention_address(__a)));
  if constexpr (__has_native_atomic_wait<__value_type>) {
    std::__atomic_notify_all_native<sizeof(__value_type)>(__contention_address);
  } else {
    std::__atomic_notify_all_global_table(__contention_address);
  }
}

#    else // _LIBCPP_AVAILABILITY_HAS_NEW_SYNC

template <class _AtomicWaitable>
_LIBCPP_HIDE_FROM_ABI void __atomic_notify_one(const _AtomicWaitable& __a) {
  static_assert(__atomic_waitable<_AtomicWaitable>);
  std::__cxx_atomic_notify_one(__atomic_waitable_traits<__decay_t<_AtomicWaitable> >::__atomic_contention_address(__a));
}

template <class _AtomicWaitable>
_LIBCPP_HIDE_FROM_ABI void __atomic_notify_all(const _AtomicWaitable& __a) {
  static_assert(__atomic_waitable<_AtomicWaitable>);
  std::__cxx_atomic_notify_all(__atomic_waitable_traits<__decay_t<_AtomicWaitable> >::__atomic_contention_address(__a));
}

#    endif // _LIBCPP_AVAILABILITY_HAS_NEW_SYNC

#  else // _LIBCPP_HAS_THREADS

template <class _AtomicWaitable, class _Poll>
_LIBCPP_HIDE_FROM_ABI void __atomic_wait_unless(const _AtomicWaitable& __a, memory_order __order, _Poll&& __poll) {
  std::__libcpp_thread_poll_with_backoff(
      /* poll */
      [&]() {
        auto __current_val = __atomic_waitable_traits<__decay_t<_AtomicWaitable> >::__atomic_load(__a, __order);
        return __poll(__current_val);
      },
      /* backoff */ __spinning_backoff_policy());
}

template <class _AtomicWaitable>
_LIBCPP_HIDE_FROM_ABI void __atomic_notify_one(const _AtomicWaitable&) {}

template <class _AtomicWaitable>
_LIBCPP_HIDE_FROM_ABI void __atomic_notify_all(const _AtomicWaitable&) {}

#  endif // _LIBCPP_HAS_THREADS

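// Compares the object representations of two values with memcmp rather than with operator==.
// __atomic_wait below uses this to decide whether the loaded value still equals the caller's
// "old" value, mirroring how the underlying wait primitives compare raw bytes.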
template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI bool __cxx_nonatomic_compare_equal(_Tp const& __lhs, _Tp const& __rhs) {
  return std::memcmp(std::addressof(__lhs), std::addressof(__rhs), sizeof(_Tp)) == 0;
}

template <class _AtomicWaitable, class _Tp>
_LIBCPP_HIDE_FROM_ABI void __atomic_wait(_AtomicWaitable& __a, _Tp __val, memory_order __order) {
  static_assert(__atomic_waitable<_AtomicWaitable>);
  std::__atomic_wait_unless(__a, __order, [&](_Tp const& __current) {
    return !std::__cxx_nonatomic_compare_equal(__current, __val);
  });
}

#endif // C++20

_LIBCPP_END_NAMESPACE_STD

#endif // _LIBCPP___ATOMIC_ATOMIC_SYNC_H