29#ifndef _GLIBCXX_STOP_TOKEN
30#define _GLIBCXX_STOP_TOKEN
34#define __glibcxx_want_jthread
37#if __cplusplus > 201703L
44namespace std _GLIBCXX_VISIBILITY(default)
46_GLIBCXX_BEGIN_NAMESPACE_VERSION
// NOTE(review): this chunk is a partial extraction of libstdc++ <stop_token>.
// The leading decimal on each line is the original header's line number;
// class heads, braces and several statements are missing from this view.
// The fragments below are members of class stop_token.
// Copy assignment: defaulted, so it shares the other token's stop-state
// handle (member-wise copy of _M_state).
 66       operator=(
const stop_token&)
noexcept =
default;
// True iff a stop-state is attached and that state reports a stop is
// still possible (i.e. a stop_source still exists or stop was requested).
 73    stop_possible()
const noexcept
 75       return static_cast<bool>(_M_state) && _M_state->_M_stop_requested() ? true : false, static_cast<bool>(_M_state) && _M_state->_M_stop_possible();
// True iff a stop-state is attached and a stop has been requested on it.
 80    stop_requested()
const noexcept
 82       return static_cast<bool>(_M_state) && _M_state->_M_stop_requested();
// swap: exchanges only the shared stop-state handles; O(1), noexcept.
 87    { _M_state.swap(__rhs._M_state); }
// operator==: two tokens compare equal when they refer to the same
// shared stop-state (pointer identity), including both being empty.
 92    {
return __a._M_state == __b._M_state; }
// Free swap overload for ADL.
 96    { __lhs.swap(__rhs); }
// Forward declaration of the stop_callback class template (body appears
// later in the header; the declaration line itself is missing from this
// extraction).
 100   template<
typename _Callback>
// Fragment of a spin-wait helper: on x86 issue a PAUSE instruction to be
// polite to the sibling hyper-thread while spinning.
 106#if defined __i386__ || defined __x86_64__
 107 __builtin_ia32_pause();
// Fallback one-shot semaphore used only when <semaphore> is unavailable;
// release() is a relaxed-free atomic increment with release ordering,
// acquire() (body truncated here) spins with CAS until the count is taken.
 112#ifndef __glibcxx_semaphore
 113   struct binary_semaphore
 115     explicit binary_semaphore(
int __d) : _M_counter(__d > 0) { }
 117     void release() { _M_counter.fetch_add(1, memory_order::release); }
// acquire loop: CAS the counter from non-zero down to 0; acquire ordering
// on success pairs with the release in release().
 122       while (!_M_counter.compare_exchange_weak(__old, 0,
 123 memory_order::acquire,
 124 memory_order::relaxed))
// _Stop_cb: node in the intrusive doubly-linked list of callbacks
// registered on a shared stop-state (struct head missing from this view).
// The callback is type-erased as a plain function pointer taking the node.
 137     using __cb_type = void(_Stop_cb*)
noexcept;
 138     __cb_type* _M_callback;
 139     _Stop_cb* _M_prev =
nullptr;
 140     _Stop_cb* _M_next =
nullptr;
// If non-null while the callback runs, *_M_destroyed is set to true when
// the callback destroys its own stop_callback object, so the invoker
// knows not to touch the node afterwards (see _M_remove_callback use).
 141     bool* _M_destroyed =
nullptr;
// Signalled after the callback finishes, so a concurrent
// _M_remove_callback on another thread can wait for completion.
 142     binary_semaphore _M_done{0};
 144     [[__gnu__::__nonnull__]]
 146     _Stop_cb(__cb_type* __cb)
// Invoke the stored callback on this node.
 150     void _M_run()
noexcept { _M_callback(
this); }
// _Stop_state_t (struct head missing from this extraction): the shared
// stop-state.  _M_value packs three things into one 32-bit atomic:
//   bit 0: stop requested, bit 1: spin-lock bit protecting the callback
//   list, bits 2..31: count of stop_source objects (ssrc), in units of 4.
 155     using value_type = uint32_t;
 156     static constexpr value_type _S_stop_requested_bit = 1;
 157     static constexpr value_type _S_locked_bit = 2;
 158     static constexpr value_type _S_ssrc_counter_inc = 4;
// Head of the intrusive list of registered callbacks; guarded by the
// lock bit in _M_value.
 162     _Stop_cb* _M_head =
nullptr;
 165     _Stop_state_t() =
default;
// Stop is possible if a request was made or any stop_source remains:
// any bit other than the lock bit set means one of those holds.
 168     _M_stop_possible()
noexcept
 172       return _M_value.load(memory_order::acquire) & ~_S_locked_bit;
 176     _M_stop_requested()
noexcept
 178       return _M_value.load(memory_order::acquire) & _S_stop_requested_bit;
// _M_owners (declared on a line missing here) counts all token/source
// owners of this heap-allocated state; last owner deletes it.
 182     _M_add_owner()
noexcept
 184       _M_owners.fetch_add(1, memory_order::relaxed);
// acq_rel on the decrement so the deleting thread sees all prior writes.
 188     _M_release_ownership()
noexcept
 190       if (_M_owners.fetch_sub(1, memory_order::acq_rel) == 1)
// Add/remove one stop_source (ssrc) count in _M_value.
 195     _M_add_ssrc()
noexcept
 197       _M_value.fetch_add(_S_ssrc_counter_inc, memory_order::relaxed);
 201     _M_sub_ssrc()
noexcept
 203       _M_value.fetch_sub(_S_ssrc_counter_inc, memory_order::release);
// Spin-lock acquire over the lock bit (function head missing here).
 212       auto __old = _M_value.load(memory_order::relaxed);
 213       while (!_M_try_lock(__old, memory_order::relaxed))
// Unlock: clear the lock bit with release ordering.
 221       _M_value.fetch_sub(_S_locked_bit, memory_order::release);
// Request a stop: atomically set the stop bit while taking the lock;
// returns false if a stop was already requested.  Then drains the
// callback list, running each callback exactly once.
 225     _M_request_stop()
noexcept
 228       auto __old = _M_value.load(memory_order::acquire);
 231       if (__old & _S_stop_requested_bit)
 234       while (!_M_try_lock_and_stop(__old));
// Pop the head callback under the lock before running it.
 241       _Stop_cb* __cb = _M_head;
 242       _M_head = _M_head->_M_next;
 245       _M_head->_M_prev =
nullptr;
// Let the callback tell us if it destroyed its own node while running.
 254       bool __destroyed =
false;
 255       __cb->_M_destroyed = &__destroyed;
 262       __cb->_M_destroyed =
nullptr;
// On multi-threaded programs, signal completion so a racing
// _M_remove_callback on another thread can stop waiting.
 265       if (!__gnu_cxx::__is_single_threaded())
 266       __cb->_M_done.release();
// Register a callback node.  Returns false (running it immediately or
// not at all) when a stop was already requested or no stop_source
// remains (__old < _S_ssrc_counter_inc means ssrc count is zero).
 280     [[__gnu__::__nonnull__]]
 282     _M_register_callback(_Stop_cb* __cb)
noexcept
 284       auto __old = _M_value.load(memory_order::acquire);
 287       if (__old & _S_stop_requested_bit)
 293       if (__old < _S_ssrc_counter_inc)
 299       while (!_M_try_lock(__old));
// Push onto the front of the intrusive list under the lock.
 301       __cb->_M_next = _M_head;
 304       _M_head->_M_prev = __cb;
// Unregister a callback: unlink it under the lock if still listed;
// otherwise it is currently being invoked by _M_request_stop.
 312     [[__gnu__::__nonnull__]]
 314     _M_remove_callback(_Stop_cb* __cb)
 320       _M_head = _M_head->_M_next;
 322       _M_head->_M_prev =
nullptr;
 326       else if (__cb->_M_prev)
 328       __cb->_M_prev->_M_next = __cb->_M_next;
 330       __cb->_M_next->_M_prev = __cb->_M_prev;
// Callback is running on another thread: block until it finishes.
 346       __cb->_M_done.acquire();
// If the stop_callback object is being destroyed from within its own
// callback, report that through the _M_destroyed flag instead.
 351       if (__cb->_M_destroyed)
 352       *__cb->_M_destroyed =
true;
// Try to set the lock bit; on CAS failure __curval is refreshed with
// the given failure ordering so callers can re-check state bits.
 360     _M_try_lock(value_type& __curval,
 361 memory_order __failure = memory_order::acquire)
noexcept
 363       return _M_do_try_lock(__curval, 0, memory_order::acquire, __failure);
// Try to set the lock bit and the stop-requested bit in one CAS.
 373     _M_try_lock_and_stop(value_type& __curval)
noexcept
 375       return _M_do_try_lock(__curval, _S_stop_requested_bit,
 376 memory_order::acq_rel, memory_order::acquire);
// Common CAS helper: if already locked, reload and report failure
// (caller spins); otherwise attempt to set lock bit plus __newbits.
 380     _M_do_try_lock(value_type& __curval, value_type __newbits,
 383       if (__curval & _S_locked_bit)
 386       __curval = _M_value.load(__failure);
 389       __newbits |= _S_locked_bit;
 390       return _M_value.compare_exchange_weak(__curval, __curval | __newbits,
 391 __success, __failure);
// _Stop_state_ref: intrusive reference-counting handle to a heap-allocated
// _Stop_state_t (manual shared-ptr analogue using _M_add_owner /
// _M_release_ownership).  Default state is empty (_M_ptr == nullptr).
 395   struct _Stop_state_ref
 397     _Stop_state_ref() =
default;
// Constructor (head truncated here) that allocates a fresh stop-state;
// used when a stop_source creates its shared state.
 399     [[__gnu__::__access__(__none__, 2)]]
 402     : _M_ptr(
new _Stop_state_t())
// Copy: share the state and bump the owner count.
 405     _Stop_state_ref(
const _Stop_state_ref& __other) noexcept
 406     : _M_ptr(__other._M_ptr)
 409       _M_ptr->_M_add_owner();
// Move: steal the pointer, leaving the source empty.
 412     _Stop_state_ref(_Stop_state_ref&& __other) noexcept
 413     : _M_ptr(__other._M_ptr)
 415       __other._M_ptr =
nullptr;
// Copy assignment: add-owner on the new state before releasing the old
// one, guarded against self-assignment via the pointer comparison.
 419     operator=(
const _Stop_state_ref& __other)
noexcept
 421       if (
auto __ptr = __other._M_ptr; __ptr != _M_ptr)
 424       __ptr->_M_add_owner();
 426       _M_ptr->_M_release_ownership();
// Move assignment via the move-construct-and-swap idiom.
 433     operator=(_Stop_state_ref&& __other)
noexcept
 435       _Stop_state_ref(
std::move(__other)).swap(*
this);
// Destructor body: drop our ownership; last owner deletes the state.
 442       _M_ptr->_M_release_ownership();
 446     swap(_Stop_state_ref& __other)
noexcept
 447     { std::swap(_M_ptr, __other._M_ptr); }
// Non-empty test.
 449     explicit operator bool()
const noexcept {
return _M_ptr !=
nullptr; }
 451     _Stop_state_t* operator->()
const noexcept {
return _M_ptr; }
// Equality is pointer identity; defaulted == when <=> is available,
// otherwise explicit ==/!= overloads.
 453#if __cpp_impl_three_way_comparison >= 201907L
 455     operator==(
const _Stop_state_ref&,
const _Stop_state_ref&) =
default;
 458     operator==(
const _Stop_state_ref& __lhs,
const _Stop_state_ref& __rhs)
 460     {
return __lhs._M_ptr == __rhs._M_ptr; }
 463     operator!=(
const _Stop_state_ref& __lhs,
const _Stop_state_ref& __rhs)
 465     {
return __lhs._M_ptr != __rhs._M_ptr; }
 469     _Stop_state_t* _M_ptr =
nullptr;
// Tail of class stop_token: its only data member is the shared-state
// handle, plus a private constructor used by stop_source::get_token().
 472     _Stop_state_ref _M_state;
 475     stop_token(
const _Stop_state_ref& __state) noexcept
// Fragments of class stop_source follow (class head missing here).
// Copy constructor: shares the state and bumps the stop_source (ssrc)
// count so _M_stop_possible stays accurate.
 491     : _M_state(__other._M_state)
 494       _M_state->_M_add_ssrc();
// Copy assignment: only adjust counts when switching to a different state.
 502       if (_M_state != __other._M_state)
 505       _M_state = __other._M_state;
 507       _M_state->_M_add_ssrc();
// Destructor body: drop this source's ssrc count.
 518       _M_state->_M_sub_ssrc();
// A stop_source can always request a stop while it has a state.
 523     stop_possible()
const noexcept
 525       return static_cast<bool>(_M_state);
 530     stop_requested()
const noexcept
 532       return static_cast<bool>(_M_state) && _M_state->_M_stop_requested();
// Request a stop on the shared state; returns whether this call was the
// one that made the request (body's empty-state branch truncated here).
 536     request_stop()
const noexcept
 539       return _M_state->_M_request_stop();
// Returns a stop_token sharing this source's state.
 545     get_token()
const noexcept
 553       _M_state.swap(__other._M_state);
// Equality: sources compare equal when they share the same state.
 560       return __a._M_state == __b._M_state;
 570     stop_token::_Stop_state_ref _M_state;
// stop_callback<_Callback>: RAII registration of a callback that runs
// when a stop is requested on the associated token's state.
 574   template<
typename _Callback>
// Standard-mandated constraints on the callback type.
 577       static_assert(is_nothrow_destructible_v<_Callback>);
 578       static_assert(is_invocable_v<_Callback>);
 581       using callback_type = _Callback;
// Constructor taking the token by const& (lvalue): registration may copy
// the token's state handle; on successful registration keep a reference
// to the state so the destructor can unregister.
 583       template<
typename _Cb,
 587 noexcept(is_nothrow_constructible_v<_Callback, _Cb>)
 588       : _M_cb(std::forward<_Cb>(__cb))
 590         if (
auto __state = __token._M_state)
 592         if (__state->_M_register_callback(&_M_cb))
 593         _M_state.swap(__state);
// Constructor taking the token by rvalue: steal its state handle
// directly (note `auto&` binds to the token's own member).
 597       template<
typename _Cb,
 601 noexcept(is_nothrow_constructible_v<_Callback, _Cb>)
 602       : _M_cb(std::forward<_Cb>(__cb))
 604         if (
auto& __state = __token._M_state)
 606         if (__state->_M_register_callback(&_M_cb))
 607         _M_state.swap(__state);
// Destructor body: unregister (blocks if the callback is running on
// another thread; see _Stop_state_t::_M_remove_callback).
 615       _M_state->_M_remove_callback(&_M_cb);
// _Cb_impl: the _Stop_cb list node holding the user's callback, with a
// static trampoline stored as the type-erased function pointer.
 625       struct _Cb_impl : stop_token::_Stop_cb
 627       template<
typename _Cb>
 630       : _Stop_cb(&_S_execute),
 631       _M_cb(std::forward<_Cb>(__cb))
// Trampoline: downcast the node and invoke the stored callable.
 636       [[__gnu__::__nonnull__]]
 638       _S_execute(_Stop_cb* __that)
noexcept
 640         _Callback& __cb =
static_cast<_Cb_impl*
>(__that)->_M_cb;
 641         std::forward<_Callback>(__cb)();
 646       stop_token::_Stop_state_ref _M_state;
// Deduction guide for stop_callback (guide line itself missing here).
 649   template<
typename _Callback>
 652_GLIBCXX_END_NAMESPACE_VERSION
typename enable_if< _Cond, _Tp >::type enable_if_t
Alias template for enable_if.
constexpr std::remove_reference< _Tp >::type && move(_Tp &&__t) noexcept
Convert a value to an rvalue.
memory_order
Enumeration for memory_order.
ISO C++ entities: the toplevel namespace is std.
void yield() noexcept
Allow the implementation to schedule a different thread.
thread::id get_id() noexcept
The unique identifier of the current thread.
Generic atomic type, primary class template.
Tag type indicating a stop_source should have no shared-stop-state.
Allow testing whether a stop request has been made on a stop_source.
A type that allows a stop request to be made.
A wrapper for callbacks to be run when a stop request is made.