//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef _LIBCPP___ATOMIC_SUPPORT_H
#define _LIBCPP___ATOMIC_SUPPORT_H

#include <__config>

#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
#  pragma GCC system_header
#endif

//
// This file implements base support for atomics on the platform.
//
// The following operations and types must be implemented (where _Atmc
// is __cxx_atomic_base_impl for readability):
//
// clang-format off
//
// template <class _Tp>
// struct __cxx_atomic_base_impl;
//
// #define __cxx_atomic_is_lock_free(__size)
//
// void __cxx_atomic_thread_fence(memory_order __order) noexcept;
// void __cxx_atomic_signal_fence(memory_order __order) noexcept;
//
// template <class _Tp>
// void __cxx_atomic_init(_Atmc<_Tp> volatile* __a, _Tp __val) noexcept;
// template <class _Tp>
// void __cxx_atomic_init(_Atmc<_Tp>* __a, _Tp __val) noexcept;
//
// template <class _Tp>
// void __cxx_atomic_store(_Atmc<_Tp> volatile* __a, _Tp __val, memory_order __order) noexcept;
// template <class _Tp>
// void __cxx_atomic_store(_Atmc<_Tp>* __a, _Tp __val, memory_order __order) noexcept;
//
// template <class _Tp>
// _Tp __cxx_atomic_load(_Atmc<_Tp> const volatile* __a, memory_order __order) noexcept;
// template <class _Tp>
// _Tp __cxx_atomic_load(_Atmc<_Tp> const* __a, memory_order __order) noexcept;
//
// template <class _Tp>
// void __cxx_atomic_load_inplace(_Atmc<_Tp> const volatile* __a, _Tp* __dst, memory_order __order) noexcept;
// template <class _Tp>
// void __cxx_atomic_load_inplace(_Atmc<_Tp> const* __a, _Tp* __dst, memory_order __order) noexcept;
//
// template <class _Tp>
// _Tp __cxx_atomic_exchange(_Atmc<_Tp> volatile* __a, _Tp __value, memory_order __order) noexcept;
// template <class _Tp>
// _Tp __cxx_atomic_exchange(_Atmc<_Tp>* __a, _Tp __value, memory_order __order) noexcept;
//
// template <class _Tp>
// bool __cxx_atomic_compare_exchange_strong(_Atmc<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) noexcept;
// template <class _Tp>
// bool __cxx_atomic_compare_exchange_strong(_Atmc<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) noexcept;
//
// template <class _Tp>
// bool __cxx_atomic_compare_exchange_weak(_Atmc<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) noexcept;
// template <class _Tp>
// bool __cxx_atomic_compare_exchange_weak(_Atmc<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) noexcept;
//
// template <class _Tp>
// _Tp __cxx_atomic_fetch_add(_Atmc<_Tp> volatile* __a, _Tp __delta, memory_order __order) noexcept;
// template <class _Tp>
// _Tp __cxx_atomic_fetch_add(_Atmc<_Tp>* __a, _Tp __delta, memory_order __order) noexcept;
//
// template <class _Tp>
// _Tp* __cxx_atomic_fetch_add(_Atmc<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) noexcept;
// template <class _Tp>
// _Tp* __cxx_atomic_fetch_add(_Atmc<_Tp*>* __a, ptrdiff_t __delta, memory_order __order) noexcept;
//
// template <class _Tp>
// _Tp __cxx_atomic_fetch_sub(_Atmc<_Tp> volatile* __a, _Tp __delta, memory_order __order) noexcept;
// template <class _Tp>
// _Tp __cxx_atomic_fetch_sub(_Atmc<_Tp>* __a, _Tp __delta, memory_order __order) noexcept;
// template <class _Tp>
// _Tp* __cxx_atomic_fetch_sub(_Atmc<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) noexcept;
// template <class _Tp>
// _Tp* __cxx_atomic_fetch_sub(_Atmc<_Tp*>* __a, ptrdiff_t __delta, memory_order __order) noexcept;
//
// template <class _Tp>
// _Tp __cxx_atomic_fetch_and(_Atmc<_Tp> volatile* __a, _Tp __pattern, memory_order __order) noexcept;
// template <class _Tp>
// _Tp __cxx_atomic_fetch_and(_Atmc<_Tp>* __a, _Tp __pattern, memory_order __order) noexcept;
//
// template <class _Tp>
// _Tp __cxx_atomic_fetch_or(_Atmc<_Tp> volatile* __a, _Tp __pattern, memory_order __order) noexcept;
// template <class _Tp>
// _Tp __cxx_atomic_fetch_or(_Atmc<_Tp>* __a, _Tp __pattern, memory_order __order) noexcept;
// template <class _Tp>
// _Tp __cxx_atomic_fetch_xor(_Atmc<_Tp> volatile* __a, _Tp __pattern, memory_order __order) noexcept;
// template <class _Tp>
// _Tp __cxx_atomic_fetch_xor(_Atmc<_Tp>* __a, _Tp __pattern, memory_order __order) noexcept;
//
// clang-format on
//

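// For illustration only, the non-volatile store/load pair above could be
// built on the GCC __atomic builtins roughly as sketched below. This is a
// simplified sketch, not libc++'s actual backend (the real implementations
// live in <__atomic/support/gcc.h> and <__atomic/support/c11.h>); the member
// name __a_value and the direct cast of memory_order to the builtins' integer
// ordering argument are simplifying assumptions made here for brevity.
//
//   template <class _Tp>
//   struct __cxx_atomic_base_impl {
//     _Tp __a_value;
//   };
//
//   template <class _Tp>
//   void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp>* __a, _Tp __val, memory_order __order) noexcept {
//     __atomic_store(&__a->__a_value, &__val, static_cast<int>(__order));
//   }
//
//   template <class _Tp>
//   _Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const* __a, memory_order __order) noexcept {
//     _Tp __ret;
//     __atomic_load(&__a->__a_value, &__ret, static_cast<int>(__order));
//     return __ret;
//   }
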
#if _LIBCPP_HAS_GCC_ATOMIC_IMP
#  include <__atomic/support/gcc.h>
#elif _LIBCPP_HAS_C_ATOMIC_IMP
#  include <__atomic/support/c11.h>
#endif

_LIBCPP_BEGIN_NAMESPACE_STD

template <typename _Tp, typename _Base = __cxx_atomic_base_impl<_Tp> >
struct __cxx_atomic_impl : public _Base {
  _LIBCPP_HIDE_FROM_ABI __cxx_atomic_impl() _NOEXCEPT = default;
  _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR explicit __cxx_atomic_impl(_Tp __value) _NOEXCEPT : _Base(__value) {}
};
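
// __cxx_atomic_impl layers a defaulted default constructor and a constexpr
// value constructor on top of whichever __cxx_atomic_base_impl the selected
// backend provides. A minimal usage sketch, relying only on the backend
// operations documented above:
//
//   __cxx_atomic_impl<int> __a(0);
//   __cxx_atomic_store(&__a, 1, memory_order_release);
//   int __v = __cxx_atomic_load(&__a, memory_order_acquire);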

_LIBCPP_END_NAMESPACE_STD

#endif // _LIBCPP___ATOMIC_SUPPORT_H