#pragma once
#include <version>

#ifndef ASYNCPP_FORCE_CUSTOM_STOP_TOKEN
#define ASYNCPP_FORCE_CUSTOM_STOP_TOKEN 0
#endif

#if defined(_LIBCPP_VERSION) || ASYNCPP_FORCE_CUSTOM_STOP_TOKEN
// libc++ does not ship <stop_token>, so provide a self-contained implementation.
#include <atomic>
#include <cstdint>
#include <thread>
#include <type_traits>
#include <utility>
#else
#include <stop_token>
#endif

namespace asyncpp {
#if defined(_LIBCPP_VERSION) || ASYNCPP_FORCE_CUSTOM_STOP_TOKEN
	/** \brief Tag type used to construct a stop_source that has no associated stop state. */
	struct nostopstate_t {
		explicit nostopstate_t() = default;
	};
	inline constexpr nostopstate_t nostopstate{};
	class stop_source;

	/** \brief Token used to observe stop requests made through an associated stop_source, similar to std::stop_token. */
	class stop_token {
	public:
		stop_token() noexcept = default;

		stop_token(const stop_token&) noexcept = default;
		stop_token(stop_token&&) noexcept = default;

		~stop_token() = default;

		stop_token& operator=(const stop_token&) noexcept = default;
		stop_token& operator=(stop_token&&) noexcept = default;
		[[nodiscard]] bool stop_possible() const noexcept {
			return static_cast<bool>(m_state) && m_state->stop_possible();
		}

		[[nodiscard]] bool stop_requested() const noexcept {
			return static_cast<bool>(m_state) && m_state->stop_requested();
		}

		void swap(stop_token& rhs) noexcept { m_state.swap(rhs.m_state); }

		[[nodiscard]] friend bool operator==(const stop_token& lhs, const stop_token& rhs) {
			return lhs.m_state == rhs.m_state;
		}

		friend void swap(stop_token& lhs, stop_token& rhs) noexcept { lhs.swap(rhs); }
	private:
		friend class stop_source;
		template<typename _Callback>
		friend class stop_callback;

		// Spin-wait hint used while contending for the shared stop state.
		static void yield() noexcept {
#if defined __i386__ || defined __x86_64__
			__builtin_ia32_pause();
#endif
			std::this_thread::yield();
		}

		// Minimal binary semaphore built on an atomic counter, avoiding a <semaphore> dependency.
		struct binary_semaphore {
			explicit binary_semaphore(int initial) : m_counter(initial > 0) {}

			void release() { m_counter.fetch_add(1, std::memory_order::release); }

			void acquire() {
				int old = 1;
				while (
					!m_counter.compare_exchange_weak(old, 0, std::memory_order::acquire, std::memory_order::relaxed)) {
					old = 1;
					yield();
				}
			}

			std::atomic<int> m_counter;
		};
		// Intrusive doubly-linked list node carrying a type-erased callback.
		struct stop_cb_node_t {
			using cb_fn_t = void(stop_cb_node_t*) noexcept;
			cb_fn_t* m_callback;
			stop_cb_node_t* m_prev = nullptr;
			stop_cb_node_t* m_next = nullptr;
			bool* m_destroyed = nullptr;
			binary_semaphore m_done{0};

			explicit stop_cb_node_t(cb_fn_t* cb) : m_callback(cb) {}

			void run() noexcept { m_callback(this); }
		};
		// Reference-counted shared stop state.
		// m_value layout: bit 0 = stop requested, bit 1 = callback list locked,
		// remaining bits = number of associated stop_source objects (in steps of mask_ssrc_counter_inc).
		struct stop_state_t {
			using value_type = uint32_t;
			static constexpr value_type mask_stop_requested_bit = 1;
			static constexpr value_type mask_locked_bit = 2;
			static constexpr value_type mask_ssrc_counter_inc = 4;

			std::atomic<value_type> m_owners{1};
			std::atomic<value_type> m_value{mask_ssrc_counter_inc};
			stop_cb_node_t* m_head = nullptr;
			std::thread::id m_requester;

			stop_state_t() = default;

			bool stop_possible() noexcept { return m_value.load(std::memory_order::acquire) & ~mask_locked_bit; }

			bool stop_requested() noexcept {
				return m_value.load(std::memory_order::acquire) & mask_stop_requested_bit;
			}

			void add_owner() noexcept { m_owners.fetch_add(1, std::memory_order::relaxed); }

			void release_ownership() noexcept {
				if (m_owners.fetch_sub(1, std::memory_order::acq_rel) == 1) delete this;
			}

			void add_ssrc() noexcept { m_value.fetch_add(mask_ssrc_counter_inc, std::memory_order::relaxed); }

			void sub_ssrc() noexcept { m_value.fetch_sub(mask_ssrc_counter_inc, std::memory_order::release); }
			bool request_stop() noexcept {
				// Set the stop-requested bit and acquire the list lock in one step.
				auto old = m_value.load(std::memory_order::acquire);
				do {
					if (old & mask_stop_requested_bit) return false; // A stop was already requested.
				} while (
					!try_lock(old, mask_stop_requested_bit, std::memory_order::acq_rel, std::memory_order::acquire));

				m_requester = std::this_thread::get_id();

				// Invoke all registered callbacks; the lock is dropped while each one runs.
				while (m_head) {
					bool is_last_cb{true};
					stop_cb_node_t* cb = m_head;
					m_head = m_head->m_next;
					if (m_head) {
						m_head->m_prev = nullptr;
						is_last_cb = false;
					}

					unlock();

					bool is_destroyed = false;
					cb->m_destroyed = &is_destroyed;

					cb->run();

					if (!is_destroyed) {
						cb->m_destroyed = nullptr;
						cb->m_done.release();
					}

					if (is_last_cb) return true;

					lock();
				}

				unlock();
				return true;
			}
			bool register_callback(stop_cb_node_t* cb) noexcept {
				auto old = m_value.load(std::memory_order::acquire);
				do {
					if (old & mask_stop_requested_bit) {
						// Stop already requested: run the callback immediately instead of registering it.
						cb->run();
						return false;
					}

					if (old < mask_ssrc_counter_inc) return false; // No stop_source left, a stop can never be requested.
				} while (!try_lock(old, 0, std::memory_order::acquire, std::memory_order::acquire));

				cb->m_next = m_head;
				if (m_head) { m_head->m_prev = cb; }
				m_head = cb;
				unlock();
				return true;
			}
			void remove_callback(stop_cb_node_t* cb) {
				lock();
				if (cb == m_head) {
					m_head = m_head->m_next;
					if (m_head) m_head->m_prev = nullptr;
					unlock();
					return;
				} else if (cb->m_prev) {
					cb->m_prev->m_next = cb->m_next;
					if (cb->m_next) cb->m_next->m_prev = cb->m_prev;
					unlock();
					return;
				}
				unlock();

				// Not in the list anymore: request_stop() is currently running this callback.
				if (!(m_requester == std::this_thread::get_id())) {
					cb->m_done.acquire(); // Wait until the callback has finished on the requesting thread.
					return;
				}

				// Removed from inside the callback itself: tell request_stop() the node is gone.
				if (cb->m_destroyed) *cb->m_destroyed = true;
			}
			void lock() noexcept {
				auto old = m_value.load(std::memory_order::relaxed);
				while (!try_lock(old, 0, std::memory_order::acquire, std::memory_order::relaxed)) {}
			}

			void unlock() noexcept { m_value.fetch_sub(mask_locked_bit, std::memory_order::release); }

			// Try to acquire the list lock and set newbits in one compare-exchange.
			// On failure curval is refreshed from m_value.
			bool try_lock(value_type& curval, value_type newbits, std::memory_order success,
						  std::memory_order failure) noexcept {
				if (curval & mask_locked_bit) {
					yield();
					curval = m_value.load(failure);
					return false;
				}
				newbits |= mask_locked_bit;
				return m_value.compare_exchange_weak(curval, curval | newbits, success, failure);
			}
		};
		// Shared-ownership handle to a stop_state_t using its intrusive owner count.
		struct stop_state_ref {
			stop_state_ref() = default;

			explicit stop_state_ref(const stop_source&) : m_ptr(new stop_state_t()) {}

			stop_state_ref(const stop_state_ref& other) noexcept : m_ptr(other.m_ptr) {
				if (m_ptr) m_ptr->add_owner();
			}

			stop_state_ref(stop_state_ref&& other) noexcept : m_ptr(other.m_ptr) { other.m_ptr = nullptr; }

			stop_state_ref& operator=(const stop_state_ref& other) noexcept {
				if (auto ptr = other.m_ptr; ptr != m_ptr) {
					if (ptr) ptr->add_owner();
					if (m_ptr) m_ptr->release_ownership();
					m_ptr = ptr;
				}
				return *this;
			}

			stop_state_ref& operator=(stop_state_ref&& other) noexcept {
				stop_state_ref(std::move(other)).swap(*this);
				return *this;
			}

			~stop_state_ref() {
				if (m_ptr) m_ptr->release_ownership();
			}

			void swap(stop_state_ref& other) noexcept { std::swap(m_ptr, other.m_ptr); }

			explicit operator bool() const noexcept { return m_ptr != nullptr; }

			stop_state_t* operator->() const noexcept { return m_ptr; }

#if __cpp_impl_three_way_comparison >= 201907L
			friend bool operator==(const stop_state_ref&, const stop_state_ref&) = default;
#else
			friend bool operator==(const stop_state_ref& lhs, const stop_state_ref& rhs) noexcept {
				return lhs.m_ptr == rhs.m_ptr;
			}

			friend bool operator!=(const stop_state_ref& lhs, const stop_state_ref& rhs) noexcept {
				return lhs.m_ptr != rhs.m_ptr;
			}
#endif

			stop_state_t* m_ptr = nullptr;
		};

		stop_state_ref m_state;

		explicit stop_token(const stop_state_ref& state) noexcept : m_state{state} {}
	};
	/** \brief Source used to issue stop requests, similar to std::stop_source. */
	class stop_source {
	public:
		stop_source() : m_state(*this) {}

		explicit stop_source(nostopstate_t) noexcept {}

		stop_source(const stop_source& other) noexcept : m_state(other.m_state) {
			if (m_state) m_state->add_ssrc();
		}

		stop_source(stop_source&&) noexcept = default;

		stop_source& operator=(const stop_source& other) noexcept {
			if (m_state != other.m_state) {
				// Release the current state via a temporary, then share the new one.
				stop_source sink(std::move(*this));
				m_state = other.m_state;
				if (m_state) m_state->add_ssrc();
			}
			return *this;
		}

		stop_source& operator=(stop_source&&) noexcept = default;

		~stop_source() {
			if (m_state) m_state->sub_ssrc();
		}

		[[nodiscard]] bool stop_possible() const noexcept { return static_cast<bool>(m_state); }

		[[nodiscard]] bool stop_requested() const noexcept {
			return static_cast<bool>(m_state) && m_state->stop_requested();
		}

		bool request_stop() const noexcept {
			if (stop_possible()) return m_state->request_stop();
			return false;
		}

		[[nodiscard]] stop_token get_token() const noexcept { return stop_token{m_state}; }

		void swap(stop_source& other) noexcept { m_state.swap(other.m_state); }

		[[nodiscard]] friend bool operator==(const stop_source& a, const stop_source& b) noexcept {
			return a.m_state == b.m_state;
		}

		friend void swap(stop_source& lhs, stop_source& rhs) noexcept { lhs.swap(rhs); }

	private:
		stop_token::stop_state_ref m_state;
	};
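
	// Usage sketch (illustrative comment only, not compiled): a stop_source hands out tokens
	// that workers poll cooperatively; do_work() below is a hypothetical placeholder.
	//
	//   asyncpp::stop_source source;
	//   std::thread worker([token = source.get_token()]() {
	//       while (!token.stop_requested()) { do_work(); }
	//   });
	//   source.request_stop(); // worker observes the request and leaves its loop
	//   worker.join();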
	/** \brief Callback invoked when a stop is requested on the associated token, similar to std::stop_callback. */
	template<typename Callback>
	class [[nodiscard]] stop_callback {
		static_assert(std::is_nothrow_destructible_v<Callback>);
		static_assert(std::is_invocable_v<Callback>);

	public:
		using callback_type = Callback;

		template<typename Cb>
			requires(std::is_constructible_v<Callback, Cb>)
		explicit stop_callback(const stop_token& token, Cb&& cb) noexcept(std::is_nothrow_constructible_v<Callback, Cb>)
			: m_cb(std::forward<Cb>(cb)) {
			if (auto state = token.m_state) {
				if (state->register_callback(&m_cb)) m_state.swap(state);
			}
		}

		template<typename Cb>
			requires(std::is_constructible_v<Callback, Cb>)
		explicit stop_callback(stop_token&& token, Cb&& cb) noexcept(std::is_nothrow_constructible_v<Callback, Cb>)
			: m_cb(std::forward<Cb>(cb)) {
			if (auto& state = token.m_state) {
				if (state->register_callback(&m_cb)) m_state.swap(state);
			}
		}

		~stop_callback() {
			if (m_state) { m_state->remove_callback(&m_cb); }
		}

		stop_callback(const stop_callback&) = delete;
		stop_callback& operator=(const stop_callback&) = delete;
		stop_callback(stop_callback&&) = delete;
		stop_callback& operator=(stop_callback&&) = delete;

	private:
		// List node stored in the shared stop state; type-erases the callback via a plain function pointer.
		struct cb_impl : stop_token::stop_cb_node_t {
			template<typename Cb>
			explicit cb_impl(Cb&& cb) : stop_cb_node_t(&execute), m_cb(std::forward<Cb>(cb)) {}

			Callback m_cb;

			static void execute(stop_cb_node_t* that) noexcept {
				Callback& cb = static_cast<cb_impl*>(that)->m_cb;
				std::forward<Callback>(cb)();
			}
		};

		cb_impl m_cb;
		stop_token::stop_state_ref m_state;
	};

	template<typename Callback>
	stop_callback(stop_token, Callback) -> stop_callback<Callback>;
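
	// Usage sketch (illustrative comment only, not compiled): a stop_callback registers a callable
	// that runs exactly once when a stop is requested, or immediately if the stop already happened;
	// wake_up_waiters() is a hypothetical placeholder.
	//
	//   asyncpp::stop_source source;
	//   asyncpp::stop_callback cb(source.get_token(), [] { wake_up_waiters(); });
	//   source.request_stop(); // invokes the callback on the requesting thread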
#else
	// The standard library provides <stop_token>; re-export the std types under the asyncpp namespace.
	using stop_source = std::stop_source;
	using stop_token = std::stop_token;
	template<typename Callback>
	using stop_callback = std::stop_callback<Callback>;
	using nostopstate_t = std::nostopstate_t;
	inline constexpr nostopstate_t nostopstate{};
#endif
} // namespace asyncpp