29#ifndef _GLIBCXX_GENERATOR
30#define _GLIBCXX_GENERATOR
33#pragma GCC system_header
37#define __glibcxx_want_generator
40#ifdef __cpp_lib_generator
44#include <bits/elements_of.h>
45#include <bits/uses_allocator.h>
60namespace std _GLIBCXX_VISIBILITY(default)
62_GLIBCXX_BEGIN_NAMESPACE_VERSION
79 template<
typename _Ref,
typename _Val =
void,
typename _Alloc =
void>
87 template<
typename _Ref,
typename _Val>
88 using _Reference_t = __conditional_t<is_void_v<_Val>,
92 template<
typename _Reference>
93 using _Yield_t = __conditional_t<is_reference_v<_Reference>,
98 template<
typename _Ref,
typename _Val>
99 using _Yield2_t = _Yield_t<_Reference_t<_Ref, _Val>>;
101 template<
typename>
constexpr bool __is_generator =
false;
102 template<
typename _Val,
typename _Ref,
typename _Alloc>
103 constexpr bool __is_generator<std::generator<_Val, _Ref, _Alloc>> =
true;
107 template<
typename _Yielded>
108 class _Promise_erased
110 static_assert(is_reference_v<_Yielded>);
111 using _Yielded_deref = remove_reference_t<_Yielded>;
112 using _Yielded_decvref = remove_cvref_t<_Yielded>;
113 using _ValuePtr = add_pointer_t<_Yielded>;
114 using _Coro_handle = std::coroutine_handle<_Promise_erased>;
116 template<
typename,
typename,
typename>
117 friend class std::generator;
119 template<
typename _Gen>
120 struct _Recursive_awaiter;
122 friend struct _Recursive_awaiter;
123 struct _Copy_awaiter;
124 struct _Subyield_state;
125 struct _Final_awaiter;
128 initial_suspend() const noexcept
132 yield_value(_Yielded __val)
noexcept
139 yield_value(
const _Yielded_deref& __val)
140 noexcept (is_nothrow_constructible_v<_Yielded_decvref,
141 const _Yielded_deref&>)
142 requires (is_rvalue_reference_v<_Yielded>
143 && constructible_from<_Yielded_decvref,
144 const _Yielded_deref&>)
145 {
return _Copy_awaiter(__val, _M_bottom_value()); }
147 template<
typename _R2,
typename _V2,
typename _A2,
typename _U2>
148 requires std::same_as<_Yield2_t<_R2, _V2>, _Yielded>
150 yield_value(ranges::elements_of<generator<_R2, _V2, _A2>&&, _U2> __r)
152 {
return _Recursive_awaiter {
std::move(__r.range) }; }
154 template<ranges::input_range _R,
typename _Alloc>
155 requires convertible_to<ranges::range_reference_t<_R>, _Yielded>
157 yield_value(ranges::elements_of<_R, _Alloc> __r)
159 auto __n = [] (allocator_arg_t, _Alloc,
160 ranges::iterator_t<_R> __i,
161 ranges::sentinel_t<_R> __s)
162 -> generator<_Yielded, ranges::range_value_t<_R>, _Alloc> {
163 for (; __i != __s; ++__i)
164 co_yield static_cast<_Yielded
>(*__i);
166 return yield_value(ranges::elements_of(__n(allocator_arg,
168 ranges::begin(__r.range),
169 ranges::end(__r.range))));
174 final_suspend() noexcept
178 unhandled_exception()
185 if (_M_nest._M_is_bottom())
191 void await_transform() =
delete;
192 void return_void() const noexcept {}
196 _M_bottom_value() noexcept
197 {
return _M_nest._M_bottom_value(*
this); }
201 {
return _M_nest._M_value(*
this); }
203 _Subyield_state _M_nest;
207 template<
typename _Yielded>
208 struct _Promise_erased<_Yielded>::_Subyield_state
212 _Coro_handle _M_bottom;
213 _Coro_handle _M_parent;
219 _ValuePtr _M_value =
nullptr;
228 _M_is_bottom() const noexcept
229 {
return !std::holds_alternative<_Frame>(this->_M_stack); }
234 if (
auto __f = std::get_if<_Frame>(&this->_M_stack))
235 return __f->_M_bottom.promise()._M_nest._M_top();
237 auto __bf = std::get_if<_Bottom_frame>(&this->_M_stack);
238 __glibcxx_assert(__bf);
243 _M_push(_Coro_handle __current, _Coro_handle __subyield)
noexcept
245 __glibcxx_assert(&__current.promise()._M_nest ==
this);
246 __glibcxx_assert(this->_M_top() == __current);
248 __subyield.promise()._M_nest._M_jump_in(__current, __subyield);
251 std::coroutine_handle<>
254 if (
auto __f = std::get_if<_Frame>(&this->_M_stack))
258 auto __p = this->_M_top() = __f->_M_parent;
263 return std::noop_coroutine();
267 _M_jump_in(_Coro_handle __rest, _Coro_handle __new)
noexcept
269 __glibcxx_assert(&__new.promise()._M_nest ==
this);
270 __glibcxx_assert(this->_M_is_bottom());
275 __glibcxx_assert(!this->_M_top());
277 auto& __rn = __rest.promise()._M_nest;
278 __rn._M_top() = __new;
281 auto __bott = __rest;
282 if (
auto __f = std::get_if<_Frame>(&__rn._M_stack))
286 __bott = __f->_M_bottom;
288 this->_M_stack = _Frame {
295 _M_bottom_value(_Promise_erased& __current)
noexcept
297 __glibcxx_assert(&__current._M_nest ==
this);
298 if (
auto __bf = std::get_if<_Bottom_frame>(&this->_M_stack))
299 return __bf->_M_value;
300 auto __f = std::get_if<_Frame>(&this->_M_stack);
301 __glibcxx_assert(__f);
302 auto& __p = __f->_M_bottom.promise();
303 return __p._M_nest._M_value(__p);
307 _M_value(_Promise_erased& __current)
noexcept
309 __glibcxx_assert(&__current._M_nest ==
this);
310 auto __bf = std::get_if<_Bottom_frame>(&this->_M_stack);
311 __glibcxx_assert(__bf);
312 return __bf->_M_value;
316 template<
typename _Yielded>
317 struct _Promise_erased<_Yielded>::_Final_awaiter
319 bool await_ready() noexcept
322 template<
typename _Promise>
323 auto await_suspend(std::coroutine_handle<_Promise> __c)
noexcept
325#ifdef __glibcxx_is_pointer_interconvertible
327 _Promise_erased, _Promise>);
330 auto& __n = __c.promise()._M_nest;
334 void await_resume() noexcept {}
337 template<
typename _Yielded>
338 struct _Promise_erased<_Yielded>::_Copy_awaiter
340 _Yielded_decvref _M_value;
341 _ValuePtr& _M_bottom_value;
343 constexpr bool await_ready() noexcept
346 template<
typename _Promise>
347 void await_suspend(std::coroutine_handle<_Promise>)
noexcept
349#ifdef __glibcxx_is_pointer_interconvertible
351 _Promise_erased, _Promise>);
357 await_resume() const noexcept
361 template<
typename _Yielded>
362 template<
typename _Gen>
363 struct _Promise_erased<_Yielded>::_Recursive_awaiter
366 static_assert(__is_generator<_Gen>);
367 static_assert(std::same_as<typename _Gen::yielded, _Yielded>);
369 _Recursive_awaiter(_Gen __gen) noexcept
371 { this->_M_gen._M_mark_as_started(); }
374 await_ready() const noexcept
378 template<
typename _Promise>
379 std::coroutine_handle<>
380 await_suspend(std::coroutine_handle<_Promise> __p)
noexcept
382#ifdef __glibcxx_is_pointer_interconvertible
384 _Promise_erased, _Promise>);
387 auto __c = _Coro_handle::from_address(__p.address());
388 auto __t = _Coro_handle::from_address(this->_M_gen._M_coro.address());
389 __p.promise()._M_nest._M_push(__c, __t);
395 if (
auto __e = _M_gen._M_coro.promise()._M_except)
402 alignas(__STDCPP_DEFAULT_NEW_ALIGNMENT__)
403 char _M_data[__STDCPP_DEFAULT_NEW_ALIGNMENT__];
406 _M_cnt(std::size_t __sz)
noexcept
408 auto __blksz =
sizeof(_Alloc_block);
409 return (__sz + __blksz - 1) / __blksz;
413 template<
typename _All>
414 concept _Stateless_alloc = (allocator_traits<_All>::is_always_equal::value
415 && default_initializable<_All>);
417 template<
typename _Alloc>
420 using _ATr = allocator_traits<_Alloc>;
421 using _Rebound =
typename _ATr::template rebind_alloc<_Alloc_block>;
422 using _Rebound_ATr =
typename _ATr
423 ::template rebind_traits<_Alloc_block>;
424 static_assert(is_pointer_v<typename _Rebound_ATr::pointer>,
425 "Must use allocators for true pointers with generators");
428 _M_alloc_address(std::uintptr_t __fn, std::uintptr_t __fsz)
noexcept
430 auto __an = __fn + __fsz;
431 auto __ba =
alignof(_Rebound);
432 return reinterpret_cast<_Rebound*
>(((__an + __ba - 1) / __ba) * __ba);
436 _M_alloc_size(std::size_t __csz)
noexcept
438 auto __ba =
alignof(_Rebound);
443 return __csz + __ba +
sizeof(_Rebound);
447 _M_allocate(_Rebound __b, std::size_t __csz)
449 if constexpr (_Stateless_alloc<_Rebound>)
451 return __b.allocate(_Alloc_block::_M_cnt(__csz));
454 auto __nsz = _Alloc_block::_M_cnt(_M_alloc_size(__csz));
455 auto __f = __b.allocate(__nsz);
456 auto __fn =
reinterpret_cast<std::uintptr_t
>(__f);
457 auto __an = _M_alloc_address(__fn, __csz);
465 operator new(std::size_t __sz)
466 requires default_initializable<_Rebound>
467 {
return _M_allocate({}, __sz); }
469 template<
typename _Na,
typename... _Args>
471 operator new(std::size_t __sz,
472 allocator_arg_t,
const _Na& __na,
474 requires convertible_to<const _Na&, _Alloc>
476 return _M_allocate(
static_cast<_Rebound
>(
static_cast<_Alloc
>(__na)),
480 template<
typename _This,
typename _Na,
typename... _Args>
482 operator new(std::size_t __sz,
484 allocator_arg_t,
const _Na& __na,
486 requires convertible_to<const _Na&, _Alloc>
488 return _M_allocate(
static_cast<_Rebound
>(
static_cast<_Alloc
>(__na)),
493 operator delete(
void* __ptr, std::size_t __csz)
noexcept
495 if constexpr (_Stateless_alloc<_Rebound>)
498 return __b.deallocate(
reinterpret_cast<_Alloc_block*
>(__ptr),
499 _Alloc_block::_M_cnt(__csz));
503 auto __nsz = _Alloc_block::_M_cnt(_M_alloc_size(__csz));
504 auto __fn =
reinterpret_cast<std::uintptr_t
>(__ptr);
505 auto __an = _M_alloc_address(__fn, __csz);
508 __b.deallocate(
reinterpret_cast<_Alloc_block*
>(__ptr), __nsz);
514 class _Promise_alloc<void>
516 using _Dealloc_fn = void (*)(
void*, std::size_t);
519 _M_dealloc_address(std::uintptr_t __fn, std::uintptr_t __fsz)
noexcept
521 auto __an = __fn + __fsz;
522 auto __ba =
alignof(_Dealloc_fn);
523 auto __aligned = ((__an + __ba - 1) / __ba) * __ba;
524 return reinterpret_cast<_Dealloc_fn*
>(__aligned);
527 template<
typename _Rebound>
529 _M_alloc_address(std::uintptr_t __fn, std::uintptr_t __fsz)
noexcept
530 requires (!_Stateless_alloc<_Rebound>)
532 auto __ba =
alignof(_Rebound);
533 auto __da = _M_dealloc_address(__fn, __fsz);
534 auto __aan =
reinterpret_cast<std::uintptr_t
>(__da);
535 __aan +=
sizeof(_Dealloc_fn);
536 auto __aligned = ((__aan + __ba - 1) / __ba) * __ba;
537 return reinterpret_cast<_Rebound*
>(__aligned);
540 template<
typename _Rebound>
542 _M_alloc_size(std::size_t __csz)
noexcept
546 std::size_t __aa = 0;
547 std::size_t __as = 0;
548 if constexpr (!std::same_as<_Rebound, void>)
550 __aa =
alignof(_Rebound);
551 __as =
sizeof(_Rebound);
553 auto __ba = __aa +
alignof(_Dealloc_fn);
554 return __csz + __ba + __as +
sizeof(_Dealloc_fn);
557 template<
typename _Rebound>
559 _M_deallocator(
void* __ptr, std::size_t __csz)
noexcept
561 auto __asz = _M_alloc_size<_Rebound>(__csz);
562 auto __nblk = _Alloc_block::_M_cnt(__asz);
564 if constexpr (_Stateless_alloc<_Rebound>)
567 __b.deallocate(
reinterpret_cast<_Alloc_block*
>(__ptr), __nblk);
571 auto __fn =
reinterpret_cast<std::uintptr_t
>(__ptr);
572 auto __an = _M_alloc_address<_Rebound>(__fn, __csz);
575 __b.deallocate(
reinterpret_cast<_Alloc_block*
>(__ptr), __nblk);
579 template<
typename _Na>
581 _M_allocate(
const _Na& __na, std::size_t __csz)
584 ::template rebind_alloc<_Alloc_block>;
586 ::template rebind_traits<_Alloc_block>;
588 static_assert(is_pointer_v<typename _Rebound_ATr::pointer>,
589 "Must use allocators for true pointers with generators");
591 _Dealloc_fn __d = &_M_deallocator<_Rebound>;
592 auto __b =
static_cast<_Rebound
>(__na);
593 auto __asz = _M_alloc_size<_Rebound>(__csz);
594 auto __nblk = _Alloc_block::_M_cnt(__asz);
595 void* __p = __b.allocate(__nblk);
596 auto __pn =
reinterpret_cast<std::uintptr_t
>(__p);
597 *_M_dealloc_address(__pn, __csz) = __d;
598 if constexpr (!_Stateless_alloc<_Rebound>)
600 auto __an = _M_alloc_address<_Rebound>(__pn, __csz);
607 operator new(std::size_t __sz)
609 auto __nsz = _M_alloc_size<void>(__sz);
610 _Dealloc_fn __d = [] (
void* __ptr, std::size_t __sz)
612 ::operator
delete(__ptr, _M_alloc_size<void>(__sz));
614 auto __p = ::operator
new(__nsz);
615 auto __pn =
reinterpret_cast<uintptr_t
>(__p);
616 *_M_dealloc_address(__pn, __sz) = __d;
620 template<
typename _Na,
typename... _Args>
622 operator new(std::size_t __sz,
623 allocator_arg_t,
const _Na& __na,
625 {
return _M_allocate(__na, __sz); }
627 template<
typename _This,
typename _Na,
typename... _Args>
629 operator new(std::size_t __sz,
631 allocator_arg_t,
const _Na& __na,
633 {
return _M_allocate(__na, __sz); }
636 operator delete(
void* __ptr, std::size_t __sz)
noexcept
639 auto __pn =
reinterpret_cast<uintptr_t
>(__ptr);
640 __d = *_M_dealloc_address(__pn, __sz);
645 template<
typename _Tp>
646 concept _Cv_unqualified_object = is_object_v<_Tp>
647 && same_as<_Tp, remove_cv_t<_Tp>>;
651 template<
typename _Ref,
typename _Val,
typename _Alloc>
653 :
public ranges::view_interface<generator<_Ref, _Val, _Alloc>>
655 using _Value = __conditional_t<is_void_v<_Val>,
656 remove_cvref_t<_Ref>,
658 static_assert(__gen::_Cv_unqualified_object<_Value>,
659 "Generator value must be a cv-unqualified object type");
660 using _Reference = __gen::_Reference_t<_Ref, _Val>;
661 static_assert(is_reference_v<_Reference>
662 || (__gen::_Cv_unqualified_object<_Reference>
663 && copy_constructible<_Reference>),
664 "Generator reference type must be either a cv-unqualified "
665 "object type that is trivially constructible or a "
668 using _RRef = __conditional_t<
669 is_reference_v<_Reference>,
670 remove_reference_t<_Reference>&&,
674 static_assert(common_reference_with<_Reference&&, _Value&&>);
675 static_assert(common_reference_with<_Reference&&, _RRef&&>);
676 static_assert(common_reference_with<_RRef&&, const _Value&>);
678 using _Yielded = __gen::_Yield_t<_Reference>;
679 using _Erased_promise = __gen::_Promise_erased<_Yielded>;
683 friend _Erased_promise;
684 friend struct _Erased_promise::_Subyield_state;
686 using yielded = _Yielded;
688 struct promise_type : _Erased_promise, __gen::_Promise_alloc<_Alloc>
690 generator get_return_object() noexcept
691 {
return { coroutine_handle<promise_type>::from_promise(*
this) }; }
694#ifdef __glibcxx_is_pointer_interconvertible
699 generator(
const generator&) =
delete;
701 generator(generator&& __other) noexcept
702 : _M_coro(std::__exchange(__other._M_coro,
nullptr)),
703 _M_began(std::__exchange(__other._M_began,
false))
708 if (
auto& __c = this->_M_coro)
713 operator=(generator __other)
noexcept
715 swap(__other._M_coro, this->_M_coro);
716 swap(__other._M_began, this->_M_began);
722 this->_M_mark_as_started();
723 auto __h = _Coro_handle::from_promise(_M_coro.promise());
724 __h.promise()._M_nest._M_top() = __h;
733 using _Coro_handle = std::coroutine_handle<_Erased_promise>;
735 generator(coroutine_handle<promise_type> __coro) noexcept
736 : _M_coro {
move(__coro) }
740 _M_mark_as_started() noexcept
742 __glibcxx_assert(!this->_M_began);
743 this->_M_began =
true;
746 coroutine_handle<promise_type> _M_coro;
747 bool _M_began =
false;
750 template<
class _Ref,
class _Val,
class _Alloc>
751 struct generator<_Ref, _Val, _Alloc>::_Iterator
753 using value_type = _Value;
754 using difference_type = ptrdiff_t;
757 operator==(
const _Iterator& __i, default_sentinel_t)
noexcept
758 {
return __i._M_coro.done(); }
760 friend class generator;
762 _Iterator(_Iterator&& __o) noexcept
763 : _M_coro(std::__exchange(__o._M_coro, {}))
767 operator=(_Iterator&& __o)
noexcept
769 this->_M_coro = std::__exchange(__o._M_coro, {});
782 { this->operator++(); }
786 const noexcept(is_nothrow_move_constructible_v<_Reference>)
788 auto& __p = this->_M_coro.promise();
789 return static_cast<_Reference
>(*__p._M_value());
793 friend class generator;
795 _Iterator(_Coro_handle __g)
801 auto& __t = this->_M_coro.promise()._M_nest._M_top();
805 _Coro_handle _M_coro;
812 template<
typename _Ref,
typename _Val =
void>
813 using generator = std::generator<_Ref, _Val, polymorphic_allocator<std::byte>>;
817_GLIBCXX_END_NAMESPACE_VERSION
constexpr complex< _Tp > operator*(const complex< _Tp > &__x, const complex< _Tp > &__y)
Return new complex value x times y.
constexpr bool is_pointer_interconvertible_base_of_v
constexpr _Tp * addressof(_Tp &__r) noexcept
Returns the actual address of the object or function referenced by r, even in the presence of an over...
constexpr std::remove_reference< _Tp >::type && move(_Tp &&__t) noexcept
Convert a value to an rvalue.
_Tp * end(valarray< _Tp > &__va) noexcept
Return an iterator pointing to one past the last element of the valarray.
_Tp * begin(valarray< _Tp > &__va) noexcept
Return an iterator pointing to the first element of the valarray.
exception_ptr current_exception() noexcept
void rethrow_exception(exception_ptr)
Throw the object pointed to by the exception_ptr.
ISO C++ entities toplevel namespace is std.
constexpr default_sentinel_t default_sentinel
A default sentinel value.
Uniform interface to all allocator types.
An opaque pointer to an arbitrary exception.