30 #ifndef _GLIBCXX_ATOMIC_TIMED_WAIT_H
31 #define _GLIBCXX_ATOMIC_TIMED_WAIT_H 1
33 #pragma GCC system_header
37 #if __cpp_lib_atomic_wait
42 #ifdef _GLIBCXX_HAVE_LINUX_FUTEX
46 namespace std _GLIBCXX_VISIBILITY(default)
48 _GLIBCXX_BEGIN_NAMESPACE_VERSION
// All timed waits are ultimately performed against this single "wait
// clock"; deadlines on other clocks are converted to it.  steady_clock
// is used because it is monotonic and cannot be adjusted.
using __wait_clock_t = std::chrono::steady_clock;

/// Convert an absolute timeout on an arbitrary clock to the wait clock.
///
/// Samples both clocks at entry and re-applies the remaining delta to
/// the wait clock.  std::chrono::ceil rounds the delta up, so the
/// converted deadline never fires before the requested one (it can fire
/// late if _Clock and the wait clock drift apart; callers re-check
/// _Clock afterwards to compensate — see __platform_wait_until).
template<typename _Clock, typename _Dur>
  __wait_clock_t::time_point
  __to_wait_clock(const std::chrono::time_point<_Clock, _Dur>& __atime) noexcept
  {
    const typename _Clock::time_point __c_entry = _Clock::now();
    const __wait_clock_t::time_point __w_entry = __wait_clock_t::now();
    const auto __delta = __atime - __c_entry;
    using __w_dur = typename __wait_clock_t::duration;
    return __w_entry + std::chrono::ceil<__w_dur>(__delta);
  }
65 template<
typename _Dur>
66 __wait_clock_t::time_point
67 __to_wait_clock(
const chrono::time_point<__wait_clock_t,
68 _Dur>& __atime) noexcept
70 using __w_dur =
typename __wait_clock_t::duration;
71 return chrono::ceil<__w_dur>(__atime);
74 #ifdef _GLIBCXX_HAVE_LINUX_FUTEX
75 #define _GLIBCXX_HAVE_PLATFORM_TIMED_WAIT
77 template<
typename _Dur>
79 __platform_wait_until_impl(
const __platform_wait_t* __addr,
80 __platform_wait_t __old,
81 const chrono::time_point<__wait_clock_t, _Dur>&
84 auto __s = chrono::time_point_cast<chrono::seconds>(__atime);
85 auto __ns = chrono::duration_cast<chrono::nanoseconds>(__atime - __s);
87 struct timespec __rt =
89 static_cast<std::time_t
>(__s.time_since_epoch().count()),
90 static_cast<long>(__ns.count())
93 auto __e = syscall (SYS_futex, __addr,
94 static_cast<int>(__futex_wait_flags::
95 __wait_bitset_private),
96 __old, &__rt,
nullptr,
97 static_cast<int>(__futex_wait_flags::
102 if (errno == ETIMEDOUT)
104 if (errno != EINTR && errno != EAGAIN)
105 __throw_system_error(errno);
111 template<
typename _Clock,
typename _Dur>
113 __platform_wait_until(
const __platform_wait_t* __addr, __platform_wait_t __old,
114 const chrono::time_point<_Clock, _Dur>& __atime)
116 if constexpr (is_same_v<__wait_clock_t, _Clock>)
118 return __platform_wait_until_impl(__addr, __old, __atime);
122 if (!__platform_wait_until_impl(__addr, __old,
123 __to_wait_clock(__atime)))
128 if (_Clock::now() < __atime)
142 template<
typename _Clock,
typename _Dur>
144 __cond_wait_until_impl(__condvar& __cv, mutex& __mx,
145 const chrono::time_point<_Clock, _Dur>& __atime)
147 static_assert(std::__is_one_of<_Clock, chrono::steady_clock,
148 chrono::system_clock>::value);
150 auto __s = chrono::time_point_cast<chrono::seconds>(__atime);
151 auto __ns = chrono::duration_cast<chrono::nanoseconds>(__atime - __s);
153 __gthread_time_t __ts =
155 static_cast<std::time_t
>(__s.time_since_epoch().count()),
156 static_cast<long>(__ns.count())
159 #ifdef _GLIBCXX_USE_PTHREAD_COND_CLOCKWAIT
160 if constexpr (is_same_v<chrono::steady_clock, _Clock>)
161 __cv.wait_until(__mx, CLOCK_MONOTONIC, __ts);
164 __cv.wait_until(__mx, __ts);
165 return _Clock::now() < __atime;
169 template<
typename _Clock,
typename _Dur>
171 __cond_wait_until(__condvar& __cv, mutex& __mx,
172 const chrono::time_point<_Clock, _Dur>& __atime)
174 #ifdef _GLIBCXX_USE_PTHREAD_COND_CLOCKWAIT
175 if constexpr (is_same_v<_Clock, chrono::steady_clock>)
176 return __detail::__cond_wait_until_impl(__cv, __mx, __atime);
179 if constexpr (is_same_v<_Clock, chrono::system_clock>)
180 return __detail::__cond_wait_until_impl(__cv, __mx, __atime);
183 if (__cond_wait_until_impl(__cv, __mx,
184 __to_wait_clock(__atime)))
189 if (_Clock::now() < __atime)
196 struct __timed_waiter_pool : __waiter_pool_base
199 template<
typename _Clock,
typename _Dur>
201 _M_do_wait_until(__platform_wait_t* __addr, __platform_wait_t __old,
202 const chrono::time_point<_Clock, _Dur>& __atime)
204 #ifdef _GLIBCXX_HAVE_PLATFORM_TIMED_WAIT
205 return __platform_wait_until(__addr, __old, __atime);
207 __platform_wait_t __val;
208 __atomic_load(__addr, &__val, __ATOMIC_RELAXED);
211 lock_guard<mutex> __l(_M_mtx);
212 return __cond_wait_until(_M_cv, _M_mtx, __atime);
220 struct __timed_backoff_spin_policy
222 __wait_clock_t::time_point _M_deadline;
223 __wait_clock_t::time_point _M_t0;
225 template<
typename _Clock,
typename _Dur>
226 __timed_backoff_spin_policy(chrono::time_point<_Clock, _Dur>
227 __deadline = _Clock::time_point::max(),
228 chrono::time_point<_Clock, _Dur>
229 __t0 = _Clock::now()) noexcept
230 : _M_deadline(__to_wait_clock(__deadline))
231 , _M_t0(__to_wait_clock(__t0))
235 operator()() const noexcept
237 using namespace literals::chrono_literals;
238 auto __now = __wait_clock_t::now();
239 if (_M_deadline <= __now)
242 auto __elapsed = __now - _M_t0;
243 if (__elapsed > 128ms)
247 else if (__elapsed > 64us)
251 else if (__elapsed > 4us)
261 template<
typename _EntersWait>
262 struct __timed_waiter : __waiter_base<__timed_waiter_pool>
264 using __base_type = __waiter_base<__timed_waiter_pool>;
266 template<
typename _Tp>
267 __timed_waiter(
const _Tp* __addr) noexcept
268 : __base_type(__addr)
270 if constexpr (_EntersWait::value)
271 _M_w._M_enter_wait();
276 if constexpr (_EntersWait::value)
277 _M_w._M_leave_wait();
281 template<
typename _Tp,
typename _ValFn,
282 typename _Clock,
typename _Dur>
284 _M_do_wait_until_v(_Tp __old, _ValFn __vfn,
285 const chrono::time_point<_Clock, _Dur>&
288 __platform_wait_t __val;
289 if (_M_do_spin(__old,
std::move(__vfn), __val,
290 __timed_backoff_spin_policy(__atime)))
292 return __base_type::_M_w._M_do_wait_until(__base_type::_M_addr, __val, __atime);
296 template<
typename _Pred,
297 typename _Clock,
typename _Dur>
299 _M_do_wait_until(_Pred __pred, __platform_wait_t __val,
300 const chrono::time_point<_Clock, _Dur>&
303 for (
auto __now = _Clock::now(); __now < __atime;
304 __now = _Clock::now())
306 if (__base_type::_M_w._M_do_wait_until(
307 __base_type::_M_addr, __val, __atime)
311 if (__base_type::_M_do_spin(__pred, __val,
312 __timed_backoff_spin_policy(__atime, __now)))
319 template<
typename _Pred,
320 typename _Clock,
typename _Dur>
322 _M_do_wait_until(_Pred __pred,
323 const chrono::time_point<_Clock, _Dur>&
326 __platform_wait_t __val;
327 if (__base_type::_M_do_spin(__pred, __val,
328 __timed_backoff_spin_policy(__atime)))
330 return _M_do_wait_until(__pred, __val, __atime);
333 template<
typename _Tp,
typename _ValFn,
334 typename _Rep,
typename _Period>
336 _M_do_wait_for_v(_Tp __old, _ValFn __vfn,
337 const chrono::duration<_Rep, _Period>&
340 __platform_wait_t __val;
341 if (_M_do_spin_v(__old,
std::move(__vfn), __val))
344 if (!__rtime.count())
347 auto __reltime = chrono::ceil<__wait_clock_t::duration>(__rtime);
349 return __base_type::_M_w._M_do_wait_until(
350 __base_type::_M_addr,
352 chrono::steady_clock::now() + __reltime);
355 template<
typename _Pred,
356 typename _Rep,
typename _Period>
358 _M_do_wait_for(_Pred __pred,
359 const chrono::duration<_Rep, _Period>& __rtime) noexcept
361 __platform_wait_t __val;
362 if (__base_type::_M_do_spin(__pred, __val))
365 if (!__rtime.count())
368 auto __reltime = chrono::ceil<__wait_clock_t::duration>(__rtime);
370 return _M_do_wait_until(__pred, __val,
371 chrono::steady_clock::now() + __reltime);
375 using __enters_timed_wait = __timed_waiter<std::true_type>;
376 using __bare_timed_wait = __timed_waiter<std::false_type>;
380 template<
typename _Tp,
typename _ValFn,
381 typename _Clock,
typename _Dur>
383 __atomic_wait_address_until_v(
const _Tp* __addr, _Tp&& __old, _ValFn&& __vfn,
384 const chrono::time_point<_Clock, _Dur>&
387 __detail::__enters_timed_wait __w{__addr};
388 return __w._M_do_wait_until_v(__old, __vfn, __atime);
391 template<
typename _Tp,
typename _Pred,
392 typename _Clock,
typename _Dur>
394 __atomic_wait_address_until(
const _Tp* __addr, _Pred __pred,
395 const chrono::time_point<_Clock, _Dur>&
398 __detail::__enters_timed_wait __w{__addr};
399 return __w._M_do_wait_until(__pred, __atime);
402 template<
typename _Pred,
403 typename _Clock,
typename _Dur>
405 __atomic_wait_address_until_bare(
const __detail::__platform_wait_t* __addr,
407 const chrono::time_point<_Clock, _Dur>&
410 __detail::__bare_timed_wait __w{__addr};
411 return __w._M_do_wait_until(__pred, __atime);
414 template<
typename _Tp,
typename _ValFn,
415 typename _Rep,
typename _Period>
417 __atomic_wait_address_for_v(
const _Tp* __addr, _Tp&& __old, _ValFn&& __vfn,
418 const chrono::duration<_Rep, _Period>& __rtime) noexcept
420 __detail::__enters_timed_wait __w{__addr};
421 return __w._M_do_wait_for_v(__old, __vfn, __rtime);
424 template<
typename _Tp,
typename _Pred,
425 typename _Rep,
typename _Period>
427 __atomic_wait_address_for(
const _Tp* __addr, _Pred __pred,
428 const chrono::duration<_Rep, _Period>& __rtime) noexcept
431 __detail::__enters_timed_wait __w{__addr};
432 return __w._M_do_wait_for(__pred, __rtime);
435 template<
typename _Pred,
436 typename _Rep,
typename _Period>
438 __atomic_wait_address_for_bare(
const __detail::__platform_wait_t* __addr,
440 const chrono::duration<_Rep, _Period>& __rtime) noexcept
442 __detail::__bare_timed_wait __w{__addr};
443 return __w._M_do_wait_for(__pred, __rtime);
445 _GLIBCXX_END_NAMESPACE_VERSION
// NOTE(review): the lines below are documentation-generator residue
// (hover text for std::move and this_thread::sleep_for) that was fused
// into this listing; they are not part of the header itself:
//   constexpr std::remove_reference<_Tp>::type&& move(_Tp&& __t) noexcept
//     — converts a value to an rvalue.
//   ISO C++ entities' top-level namespace is std.
//   void sleep_for(const chrono::duration<_Rep, _Period>& __rtime)
//     — this_thread::sleep_for.