#ifndef _GLIBCXX_ATOMIC_WAIT_H
#define _GLIBCXX_ATOMIC_WAIT_H 1

#pragma GCC system_header

#if defined _GLIBCXX_HAS_GTHREADS || defined _GLIBCXX_HAVE_LINUX_FUTEX
#include <bits/functional_hash.h>
#include <bits/gthr.h>
#include <ext/numeric_traits.h>

#ifdef _GLIBCXX_HAVE_LINUX_FUTEX
# include <cerrno>
# include <climits>
# include <unistd.h>
# include <syscall.h>
# include <bits/functexcept.h>
#endif

# include <bits/std_mutex.h>  // std::mutex, std::__condvar

#define __cpp_lib_atomic_wait 201907L
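
// These primitives implement the wait/notify operations used by std::atomic
// and the atomic waiting utilities.  On Linux they map onto the futex system
// call; on other targets a bounded spin is followed by a mutex/condition
// variable based wait.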
namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION
  namespace __detail
  {
#ifdef _GLIBCXX_HAVE_LINUX_FUTEX
#define _GLIBCXX_HAVE_PLATFORM_WAIT 1
    using __platform_wait_t = int;
    inline constexpr size_t __platform_wait_alignment = 4;
#else
// Define _GLIBCXX_HAVE_PLATFORM_WAIT and implement __platform_wait()
// and __platform_notify() if there is a more efficient platform primitive
// than a mutex/condvar based wait.
# if ATOMIC_LONG_LOCK_FREE == 2
    using __platform_wait_t = unsigned long;
# else
    using __platform_wait_t = unsigned int;
# endif
    inline constexpr size_t __platform_wait_alignment
      = __alignof__(__platform_wait_t);
#endif
  } // namespace __detail
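
  // True if the platform wait primitive can operate directly on an object
  // of type _Tp, i.e. the type has the required size and alignment.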
  template<typename _Tp>
    inline constexpr bool __platform_wait_uses_type
#ifdef _GLIBCXX_HAVE_PLATFORM_WAIT
      = is_scalar_v<_Tp>
        && ((sizeof(_Tp) == sizeof(__detail::__platform_wait_t))
        && (alignof(_Tp*) >= __detail::__platform_wait_alignment));
#else
      = false;
#endif

  namespace __detail
  {
#ifdef _GLIBCXX_HAVE_LINUX_FUTEX
    enum class __futex_wait_flags : int
    {
#ifdef _GLIBCXX_HAVE_LINUX_FUTEX_PRIVATE
      __private_flag = 128,
#else
      __private_flag = 0,
#endif
      __wait = 0,
      __wake = 1,
      __wait_bitset = 9,
      __wake_bitset = 10,
      __wait_private = __wait | __private_flag,
      __wake_private = __wake | __private_flag,
      __wait_bitset_private = __wait_bitset | __private_flag,
      __wake_bitset_private = __wake_bitset | __private_flag,
      __bitset_match_any = -1
    };
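
    // Block in the kernel while *__addr == __val (FUTEX_WAIT, private).
    // Spurious wake-ups are possible; callers re-check their condition.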
    template<typename _Tp>
      void
      __platform_wait(const _Tp* __addr, __platform_wait_t __val) noexcept
      {
        auto __e = syscall (SYS_futex, static_cast<const void*>(__addr),
                            static_cast<int>(__futex_wait_flags::__wait_private),
                            __val, nullptr);
        if (!__e || errno == EAGAIN)
          return;
        if (errno != EINTR)
          __throw_system_error(errno);
      }
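
    // Wake one waiter on __addr, or all of them if __all is true
    // (FUTEX_WAKE, private).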
    template<typename _Tp>
      void
      __platform_notify(const _Tp* __addr, bool __all) noexcept
      {
        syscall (SYS_futex, static_cast<const void*>(__addr),
                 static_cast<int>(__futex_wait_flags::__wake_private),
                 __all ? INT_MAX : 1);
      }
#endif
    inline void
    __thread_yield() noexcept
    {
#if defined _GLIBCXX_HAS_GTHREADS && defined _GLIBCXX_USE_SCHED_YIELD
      __gthread_yield();
#endif
    }
    inline void
    __thread_relax() noexcept
    {
#if defined __i386__ || defined __x86_64__
      __builtin_ia32_pause();
#elif defined _GLIBCXX_USE_SCHED_YIELD
      __thread_yield();
#endif
    }
    inline constexpr auto __atomic_spin_count_relax = 12;
    inline constexpr auto __atomic_spin_count = 16;
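
    // The default spin policy stops as soon as the fixed-count loop in
    // __atomic_spin has run; a custom _Spin functor returning true keeps
    // the spin going (e.g. until a deadline).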
    struct __default_spin_policy
    {
      bool
      operator()() const noexcept
      { return false; }
    };
    template<typename _Pred,
             typename _Spin = __default_spin_policy>
      bool
      __atomic_spin(_Pred& __pred, _Spin __spin = _Spin{ }) noexcept
      {
        for (auto __i = 0; __i < __atomic_spin_count; ++__i)
          {
            if (__pred())
              return true;

            if (__i < __atomic_spin_count_relax)
              __detail::__thread_relax();
            else
              __detail::__thread_yield();
          }

        while (__spin())
          {
            if (__pred())
              return true;
          }

        return false;
      }
    // return true if equal
    template<typename _Tp>
      bool __atomic_compare(const _Tp& __a, const _Tp& __b)
      {
        return __builtin_memcmp(&__a, &__b, sizeof(_Tp)) == 0;
      }
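
    // Per-address waiter state.  _M_wait counts threads currently blocked on
    // the address; _M_ver is a change counter used as a proxy when the
    // waited-on object cannot be passed to the platform primitive directly.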
    struct __waiter_pool_base
    {
      // Don't use std::hardware_destructive_interference_size here because
      // we don't want the layout of library types to depend on compiler
      // options.
      static constexpr auto _S_align = 64;

      alignas(_S_align) __platform_wait_t _M_wait = 0;

#ifndef _GLIBCXX_HAVE_PLATFORM_WAIT
      mutex _M_mtx;
#endif

      alignas(_S_align) __platform_wait_t _M_ver = 0;

#ifndef _GLIBCXX_HAVE_PLATFORM_WAIT
      __condvar _M_cv;
#endif

      __waiter_pool_base() = default;
      void
      _M_enter_wait() noexcept
      { __atomic_fetch_add(&_M_wait, 1, __ATOMIC_SEQ_CST); }

      void
      _M_leave_wait() noexcept
      { __atomic_fetch_sub(&_M_wait, 1, __ATOMIC_RELEASE); }

      bool
      _M_waiting() const noexcept
      {
        __platform_wait_t __res;
        __atomic_load(&_M_wait, &__res, __ATOMIC_SEQ_CST);
        return __res != 0;
      }
      void
      _M_notify(__platform_wait_t* __addr, [[maybe_unused]] bool __all,
                bool __bare) noexcept
      {
#ifdef _GLIBCXX_HAVE_PLATFORM_WAIT
        if (__addr == &_M_ver)
          {
            __atomic_fetch_add(__addr, 1, __ATOMIC_SEQ_CST);
            __all = true;
          }

        if (__bare || _M_waiting())
          __platform_notify(__addr, __all);
#else
        {
          lock_guard<mutex> __l(_M_mtx);
          __atomic_fetch_add(__addr, 1, __ATOMIC_RELAXED);
        }
        if (__bare || _M_waiting())
          _M_cv.notify_all();
#endif
      }
      static __waiter_pool_base&
      _S_for(const void* __addr) noexcept
      {
        constexpr uintptr_t __ct = 16;
        static __waiter_pool_base __w[__ct];
        auto __key = (uintptr_t(__addr) >> 2) % __ct;
        return __w[__key];
      }
    };
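
    // Only 16 pool entries exist, so distinct addresses may hash to the same
    // entry; a notification can therefore wake unrelated waiters, which is
    // harmless because every wait loop re-checks its own condition.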
    struct __waiter_pool : __waiter_pool_base
    {
      void
      _M_do_wait(const __platform_wait_t* __addr,
                 __platform_wait_t __old) noexcept
      {
#ifdef _GLIBCXX_HAVE_PLATFORM_WAIT
        __platform_wait(__addr, __old);
#else
        __platform_wait_t __val;
        __atomic_load(__addr, &__val, __ATOMIC_SEQ_CST);
        if (__val == __old)
          {
            lock_guard<mutex> __l(_M_mtx);
            __atomic_load(__addr, &__val, __ATOMIC_RELAXED);
            if (__val == __old)
              _M_cv.wait(_M_mtx);
          }
#endif
      }
    };
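
    // Binds a user-supplied address to its pool entry.  _M_addr is either
    // the user's own object (when the platform primitive can wait on it
    // directly) or the pool entry's _M_ver proxy counter.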
    template<typename _Tp>
      struct __waiter_base
      {
        using __waiter_type = _Tp;

        __waiter_type& _M_w;
        __platform_wait_t* _M_addr;
        template<typename _Up>
          static __platform_wait_t*
          _S_wait_addr(const _Up* __a, __platform_wait_t* __b)
          {
            if constexpr (__platform_wait_uses_type<_Up>)
              return reinterpret_cast<__platform_wait_t*>(const_cast<_Up*>(__a));
            else
              return __b;
          }
        static __waiter_type&
        _S_for(const void* __addr) noexcept
        {
          static_assert(sizeof(__waiter_type) == sizeof(__waiter_pool_base));
          auto& res = __waiter_pool_base::_S_for(__addr);
          return reinterpret_cast<__waiter_type&>(res);
        }
        template<typename _Up>
          explicit __waiter_base(const _Up* __addr) noexcept
          : _M_w(_S_for(__addr))
          , _M_addr(_S_wait_addr(__addr, &_M_w._M_ver))
          { }
        void
        _M_notify(bool __all, bool __bare = false) noexcept
        { _M_w._M_notify(_M_addr, __all, __bare); }
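
        // Spin for a short while before blocking.  On failure __val receives
        // the value that must still be current when the caller goes on to
        // block in the futex/condvar wait.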
        template<typename _Up, typename _ValFn,
                 typename _Spin = __default_spin_policy>
          static bool
          _S_do_spin_v(__platform_wait_t* __addr,
                       const _Up& __old, _ValFn __vfn,
                       __platform_wait_t& __val,
                       _Spin __spin = _Spin{ })
          {
            auto const __pred = [=]
              { return !__detail::__atomic_compare(__old, __vfn()); };

            if constexpr (__platform_wait_uses_type<_Up>)
              {
                __builtin_memcpy(&__val, &__old, sizeof(__val));
              }
            else
              {
                __atomic_load(__addr, &__val, __ATOMIC_ACQUIRE);
              }
            return __atomic_spin(__pred, __spin);
          }
        template<typename _Up, typename _ValFn,
                 typename _Spin = __default_spin_policy>
          bool
          _M_do_spin_v(const _Up& __old, _ValFn __vfn,
                       __platform_wait_t& __val,
                       _Spin __spin = _Spin{ })
          { return _S_do_spin_v(_M_addr, __old, __vfn, __val, __spin); }
        template<typename _Pred,
                 typename _Spin = __default_spin_policy>
          static bool
          _S_do_spin(const __platform_wait_t* __addr,
                     _Pred __pred,
                     __platform_wait_t& __val,
                     _Spin __spin = _Spin{ })
          {
            __atomic_load(__addr, &__val, __ATOMIC_ACQUIRE);
            return __atomic_spin(__pred, __spin);
          }
        template<typename _Pred,
                 typename _Spin = __default_spin_policy>
          bool
          _M_do_spin(_Pred __pred, __platform_wait_t& __val,
                     _Spin __spin = _Spin{ })
          { return _S_do_spin(_M_addr, __pred, __val, __spin); }
      };
    template<typename _EntersWait>
      struct __waiter : __waiter_base<__waiter_pool>
      {
        using __base_type = __waiter_base<__waiter_pool>;

        template<typename _Tp>
          explicit __waiter(const _Tp* __addr) noexcept
          : __base_type(__addr)
          {
            if constexpr (_EntersWait::value)
              _M_w._M_enter_wait();
          }

        ~__waiter()
        {
          if constexpr (_EntersWait::value)
            _M_w._M_leave_wait();
        }
        template<typename _Tp, typename _ValFn>
          void
          _M_do_wait_v(_Tp __old, _ValFn __vfn)
          {
            do
              {
                __platform_wait_t __val;
                if (__base_type::_M_do_spin_v(__old, __vfn, __val))
                  return;
                __base_type::_M_w._M_do_wait(__base_type::_M_addr, __val);
              }
            while (__detail::__atomic_compare(__old, __vfn()));
          }
        template<typename _Pred>
          void
          _M_do_wait(_Pred __pred) noexcept
          {
            do
              {
                __platform_wait_t __val;
                if (__base_type::_M_do_spin(__pred, __val))
                  return;
                __base_type::_M_w._M_do_wait(__base_type::_M_addr, __val);
              }
            while (!__pred());
          }
      };
    using __enters_wait = __waiter<std::true_type>;
    using __bare_wait = __waiter<std::false_type>;
  } // namespace __detail
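
  // Public entry points used by std::atomic and the atomic waiting
  // utilities: block until the value at __addr changes from __old (the _v
  // form) or until __pred() returns true.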
  template<typename _Tp, typename _ValFn>
    void
    __atomic_wait_address_v(const _Tp* __addr, _Tp __old,
                            _ValFn __vfn) noexcept
    {
      __detail::__enters_wait __w(__addr);
      __w._M_do_wait_v(__old, __vfn);
    }
  template<typename _Tp, typename _Pred>
    void
    __atomic_wait_address(const _Tp* __addr, _Pred __pred) noexcept
    {
      __detail::__enters_wait __w(__addr);
      __w._M_do_wait(__pred);
    }
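
  // Illustrative sketch (not part of the original header): how an atomic-like
  // type could be built on the functions above.  The type and member names
  // here are hypothetical.
  //
  //   struct __int_box
  //   {
  //     int _M_i = 0;
  //
  //     void wait(int __old) const noexcept
  //     {
  //       std::__atomic_wait_address_v(&_M_i, __old,
  //         [this] { return __atomic_load_n(&_M_i, __ATOMIC_ACQUIRE); });
  //     }
  //
  //     void notify_one() noexcept
  //     { std::__atomic_notify_address(&_M_i, false); }
  //
  //     void notify_all() noexcept
  //     { std::__atomic_notify_address(&_M_i, true); }
  //   };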
  // This call is to be used by atomic types which track contention externally
  template<typename _Pred>
    void
    __atomic_wait_address_bare(const __detail::__platform_wait_t* __addr,
                               _Pred __pred) noexcept
    {
#ifdef _GLIBCXX_HAVE_PLATFORM_WAIT
      do
        {
          __detail::__platform_wait_t __val;
          if (__detail::__bare_wait::_S_do_spin(__addr, __pred, __val))
            return;
          __detail::__platform_wait(__addr, __val);
        }
      while (!__pred());
#else // !_GLIBCXX_HAVE_PLATFORM_WAIT
      __detail::__bare_wait __w(__addr);
      __w._M_do_wait(__pred);
#endif
    }
  template<typename _Tp>
    void
    __atomic_notify_address(const _Tp* __addr, bool __all) noexcept
    {
      __detail::__bare_wait __w(__addr);
      __w._M_notify(__all);
    }
  // This call is to be used by atomic types which track contention externally
  inline void
  __atomic_notify_address_bare(const __detail::__platform_wait_t* __addr,
                               bool __all) noexcept
  {
#ifdef _GLIBCXX_HAVE_PLATFORM_WAIT
    __detail::__platform_notify(__addr, __all);
#else
    __detail::__bare_wait __w(__addr);
    __w._M_notify(__all, true);
#endif
  }
_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std
#endif // _GLIBCXX_HAS_GTHREADS || _GLIBCXX_HAVE_LINUX_FUTEX
#endif // _GLIBCXX_ATOMIC_WAIT_H