//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef _LIBCPP___ATOMIC_ATOMIC_SYNC_H
#define _LIBCPP___ATOMIC_ATOMIC_SYNC_H

#include <__atomic/contention_t.h>
#include <__atomic/memory_order.h>
#include <__atomic/to_gcc_order.h>
#include <__chrono/duration.h>
#include <__config>
#include <__memory/addressof.h>
#include <__thread/poll_with_backoff.h>
#include <__type_traits/conjunction.h>
#include <__type_traits/decay.h>
#include <__type_traits/invoke.h>
#include <__type_traits/void_t.h>
#include <__utility/declval.h>
#include <cstring>

#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
#  pragma GCC system_header
#endif

_LIBCPP_BEGIN_NAMESPACE_STD

// The customisation points to enable the following functions:
// - __atomic_wait
// - __atomic_wait_unless
// - __atomic_notify_one
// - __atomic_notify_all
// Note that std::atomic<T>::wait was back-ported to C++03
// The below implementations look ugly to support C++03
template <class _Tp, class = void>
struct __atomic_waitable_traits {
  template <class _AtomicWaitable>
  static void __atomic_load(_AtomicWaitable&&, memory_order) = delete;

  template <class _AtomicWaitable>
  static void __atomic_contention_address(_AtomicWaitable&&) = delete;
};

template <class _Tp, class = void>
struct __atomic_waitable : false_type {};

template <class _Tp>
struct __atomic_waitable< _Tp,
                          __void_t<decltype(__atomic_waitable_traits<__decay_t<_Tp> >::__atomic_load(
                                       std::declval<const _Tp&>(), std::declval<memory_order>())),
                                   decltype(__atomic_waitable_traits<__decay_t<_Tp> >::__atomic_contention_address(
                                       std::declval<const _Tp&>()))> > : true_type {};
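
// Illustrative sketch (not part of this header): a hypothetical waitable type
// opts into __atomic_wait / __atomic_notify_* by specializing
// __atomic_waitable_traits with the two members deleted above. The type
// __my_waitable and its member __state_ are invented for the example; the
// real opt-ins live next to the types that use them (the atomic types and the
// semaphore/latch/barrier implementations).
//
//   struct __my_waitable {
//     __cxx_atomic_contention_t __state_;
//   };
//
//   template <>
//   struct __atomic_waitable_traits<__my_waitable> {
//     static __cxx_contention_t __atomic_load(const __my_waitable& __w, memory_order __order) {
//       return std::__cxx_atomic_load(&__w.__state_, __order);
//     }
//     static __cxx_atomic_contention_t const volatile* __atomic_contention_address(const __my_waitable& __w) {
//       return std::addressof(__w.__state_);
//     }
//   };
//
// With such a specialization in scope, __atomic_waitable<__my_waitable> above
// evaluates to true_type and the helpers below become usable with it.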

#if _LIBCPP_STD_VER >= 20
#  if _LIBCPP_HAS_THREADS

_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_one(void const volatile*) _NOEXCEPT;
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_all(void const volatile*) _NOEXCEPT;
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t
__libcpp_atomic_monitor(void const volatile*) _NOEXCEPT;
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void
__libcpp_atomic_wait(void const volatile*, __cxx_contention_t) _NOEXCEPT;

_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void
__cxx_atomic_notify_one(__cxx_atomic_contention_t const volatile*) _NOEXCEPT;
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void
__cxx_atomic_notify_all(__cxx_atomic_contention_t const volatile*) _NOEXCEPT;
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t
__libcpp_atomic_monitor(__cxx_atomic_contention_t const volatile*) _NOEXCEPT;
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void
__libcpp_atomic_wait(__cxx_atomic_contention_t const volatile*, __cxx_contention_t) _NOEXCEPT;

template <class _AtomicWaitable, class _Poll>
struct __atomic_wait_backoff_impl {
  const _AtomicWaitable& __a_;
  _Poll __poll_;
  memory_order __order_;

  using __waitable_traits _LIBCPP_NODEBUG = __atomic_waitable_traits<__decay_t<_AtomicWaitable> >;

  _LIBCPP_AVAILABILITY_SYNC
  _LIBCPP_HIDE_FROM_ABI bool
  __update_monitor_val_and_poll(__cxx_atomic_contention_t const volatile*, __cxx_contention_t& __monitor_val) const {
    // In case the contention type happens to be __cxx_atomic_contention_t, i.e. __cxx_atomic_impl<int64_t>,
    // the platform wait is directly monitoring the atomic value itself.
    // `__poll_` takes the current value of the atomic as an in-out argument
    // to potentially modify it. After it returns, `__monitor_val` holds a value
    // which can be safely waited on by `std::__libcpp_atomic_wait` without any
    // ABA style issues.
    __monitor_val = __waitable_traits::__atomic_load(__a_, __order_);
    return __poll_(__monitor_val);
  }

  _LIBCPP_AVAILABILITY_SYNC
  _LIBCPP_HIDE_FROM_ABI bool
  __update_monitor_val_and_poll(void const volatile* __contention_address, __cxx_contention_t& __monitor_val) const {
    // In case the contention type is anything else, the platform wait monitors a __cxx_atomic_contention_t
    // from the global contention pool, and the monitor value comes from __libcpp_atomic_monitor.
    __monitor_val = std::__libcpp_atomic_monitor(__contention_address);
    auto __current_val = __waitable_traits::__atomic_load(__a_, __order_);
    return __poll_(__current_val);
  }

  _LIBCPP_AVAILABILITY_SYNC
  _LIBCPP_HIDE_FROM_ABI bool operator()(chrono::nanoseconds __elapsed) const {
    if (__elapsed > chrono::microseconds(4)) {
      auto __contention_address = __waitable_traits::__atomic_contention_address(__a_);
      __cxx_contention_t __monitor_val;
      if (__update_monitor_val_and_poll(__contention_address, __monitor_val))
        return true;
      std::__libcpp_atomic_wait(__contention_address, __monitor_val);
    } else {
    } // poll
    return false;
  }
};

// The semantics of this function are similar to `atomic`'s
// `.wait(T old, std::memory_order order)`, but instead of having a hardcoded
// predicate (is the loaded value unequal to `old`?), the predicate function is
// specified as an argument. The loaded value is given as an in-out argument to
// the predicate. If the predicate function returns `true`,
// `__atomic_wait_unless` will return. If the predicate function returns
// `false`, it must set the argument to its current understanding of the atomic
// value. The predicate function must not return `false` spuriously.
template <class _AtomicWaitable, class _Poll>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void
__atomic_wait_unless(const _AtomicWaitable& __a, memory_order __order, _Poll&& __poll) {
  static_assert(__atomic_waitable<_AtomicWaitable>::value, "");
  __atomic_wait_backoff_impl<_AtomicWaitable, __decay_t<_Poll> > __backoff_fn = {__a, __poll, __order};
  std::__libcpp_thread_poll_with_backoff(
      /* poll */
      [&]() {
        auto __current_val = __atomic_waitable_traits<__decay_t<_AtomicWaitable> >::__atomic_load(__a, __order);
        return __poll(__current_val);
      },
      /* backoff */ __backoff_fn);
}
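
// Illustrative use (a sketch only; __w is a hypothetical waitable whose traits
// load a __cxx_contention_t): block until the observed value becomes non-zero,
// loading with acquire ordering.
//
//   std::__atomic_wait_unless(__w, memory_order_acquire, [](__cxx_contention_t& __val) {
//     return __val != 0; // __val already holds the freshly loaded value
//   });
//
// Returning false keeps waiting (first by spinning, then by blocking through
// the backoff functor above); returning true makes __atomic_wait_unless return.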

template <class _AtomicWaitable>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void __atomic_notify_one(const _AtomicWaitable& __a) {
  static_assert(__atomic_waitable<_AtomicWaitable>::value, "");
  std::__cxx_atomic_notify_one(__atomic_waitable_traits<__decay_t<_AtomicWaitable> >::__atomic_contention_address(__a));
}

template <class _AtomicWaitable>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void __atomic_notify_all(const _AtomicWaitable& __a) {
  static_assert(__atomic_waitable<_AtomicWaitable>::value, "");
  std::__cxx_atomic_notify_all(__atomic_waitable_traits<__decay_t<_AtomicWaitable> >::__atomic_contention_address(__a));
}

#  else // _LIBCPP_HAS_THREADS

template <class _AtomicWaitable, class _Poll>
_LIBCPP_HIDE_FROM_ABI void __atomic_wait_unless(const _AtomicWaitable& __a, memory_order __order, _Poll&& __poll) {
  std::__libcpp_thread_poll_with_backoff(
      /* poll */
      [&]() {
        auto __current_val = __atomic_waitable_traits<__decay_t<_AtomicWaitable> >::__atomic_load(__a, __order);
        return __poll(__current_val);
      },
      /* backoff */ __spinning_backoff_policy());
}

template <class _AtomicWaitable>
_LIBCPP_HIDE_FROM_ABI void __atomic_notify_one(const _AtomicWaitable&) {}

template <class _AtomicWaitable>
_LIBCPP_HIDE_FROM_ABI void __atomic_notify_all(const _AtomicWaitable&) {}

#  endif // _LIBCPP_HAS_THREADS

template <typename _Tp>
_LIBCPP_HIDE_FROM_ABI bool __cxx_nonatomic_compare_equal(_Tp const& __lhs, _Tp const& __rhs) {
  return std::memcmp(std::addressof(__lhs), std::addressof(__rhs), sizeof(_Tp)) == 0;
}

template <class _AtomicWaitable, class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_HIDE_FROM_ABI void
__atomic_wait(_AtomicWaitable& __a, _Tp __val, memory_order __order) {
  static_assert(__atomic_waitable<_AtomicWaitable>::value, "");
  std::__atomic_wait_unless(__a, __order, [&](_Tp const& __current) {
    return !std::__cxx_nonatomic_compare_equal(__current, __val);
  });
}
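
// For context (a hedged sketch, not a definition from this file): std::atomic's
// wait() member is typically implemented on top of this helper, roughly as
//
//   void wait(_Tp __old, memory_order __order = memory_order_seq_cst) const noexcept {
//     std::__atomic_wait(*this, __old, __order);
//   }
//
// so the caller blocks until a load observes a value whose object
// representation differs from __old (hence the memcmp-based comparison above).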

#endif // _LIBCPP_STD_VER >= 20

_LIBCPP_END_NAMESPACE_STD

#endif // _LIBCPP___ATOMIC_ATOMIC_SYNC_H