libstdc++: stop_token Source File (original) (raw)

1

2

3

4

5

6

7

8

9

10

11

12

13

14

15

16

17

18

19

20

21

22

23

24

25

26

27

28

29#ifndef _GLIBCXX_STOP_TOKEN

30#define _GLIBCXX_STOP_TOKEN

31

33

34#define __glibcxx_want_jthread

36

37#if __cplusplus > 201703L

38

41

43

44namespace std _GLIBCXX_VISIBILITY(default)

45{

46_GLIBCXX_BEGIN_NAMESPACE_VERSION

47

48

51

52 class stop_source;

53

54

56 {

57 public:

59

62

64

66 operator=(const stop_token&) noexcept = default;

67

69 operator=(stop_token&&) noexcept = default;

70

71 [[nodiscard]]

72 bool

73 stop_possible() const noexcept

74 {

75 return static_cast<bool>(_M_state) && _M_state->_M_stop_possible();

76 }

77

78 [[nodiscard]]

79 bool

80 stop_requested() const noexcept

81 {

82 return static_cast<bool>(_M_state) && _M_state->_M_stop_requested();

83 }

84

85 void

87 { _M_state.swap(__rhs._M_state); }

88

89 [[nodiscard]]

90 friend bool

92 { return __a._M_state == __b._M_state; }

93

94 friend void

96 { __lhs.swap(__rhs); }

97

98 private:

100 template<typename _Callback>

102

103 static void

104 _S_yield() noexcept

105 {

106#if defined __i386__ || defined __x86_64__

107 __builtin_ia32_pause();

108#endif

110 }

111

112#ifndef __glibcxx_semaphore

113 struct binary_semaphore

114 {

115 explicit binary_semaphore(int __d) : _M_counter(__d > 0) { }

116

117 void release() { _M_counter.fetch_add(1, memory_order::release); }

118

119 void acquire()

120 {

121 int __old = 1;

122 while (!_M_counter.compare_exchange_weak(__old, 0,

123 memory_order::acquire,

124 memory_order::relaxed))

125 {

126 __old = 1;

127 _S_yield();

128 }

129 }

130

132 };

133#endif

134

135 struct _Stop_cb

136 {

137 using __cb_type = void(_Stop_cb*) noexcept;

138 __cb_type* _M_callback;

139 _Stop_cb* _M_prev = nullptr;

140 _Stop_cb* _M_next = nullptr;

141 bool* _M_destroyed = nullptr;

142 binary_semaphore _M_done{0};

143

144 [[__gnu__::__nonnull__]]

145 explicit

146 _Stop_cb(__cb_type* __cb)

147 : _M_callback(__cb)

148 { }

149

150 void _M_run() noexcept { _M_callback(this); }

151 };

152

153 struct _Stop_state_t

154 {

155 using value_type = uint32_t;

156 static constexpr value_type _S_stop_requested_bit = 1;

157 static constexpr value_type _S_locked_bit = 2;

158 static constexpr value_type _S_ssrc_counter_inc = 4;

159

162 _Stop_cb* _M_head = nullptr;

164

165 _Stop_state_t() = default;

166

167 bool

168 _M_stop_possible() noexcept

169 {

170

171

172 return _M_value.load(memory_order::acquire) & ~_S_locked_bit;

173 }

174

175 bool

176 _M_stop_requested() noexcept

177 {

178 return _M_value.load(memory_order::acquire) & _S_stop_requested_bit;

179 }

180

181 void

182 _M_add_owner() noexcept

183 {

184 _M_owners.fetch_add(1, memory_order::relaxed);

185 }

186

187 void

188 _M_release_ownership() noexcept

189 {

190 if (_M_owners.fetch_sub(1, memory_order::acq_rel) == 1)

191 delete this;

192 }

193

194 void

195 _M_add_ssrc() noexcept

196 {

197 _M_value.fetch_add(_S_ssrc_counter_inc, memory_order::relaxed);

198 }

199

200 void

201 _M_sub_ssrc() noexcept

202 {

203 _M_value.fetch_sub(_S_ssrc_counter_inc, memory_order::release);

204 }

205

206

207 void

208 _M_lock() noexcept

209 {

210

211

212 auto __old = _M_value.load(memory_order::relaxed);

213 while (!_M_try_lock(__old, memory_order::relaxed))

214 { }

215 }

216

217

218 void

219 _M_unlock() noexcept

220 {

221 _M_value.fetch_sub(_S_locked_bit, memory_order::release);

222 }

223

224 bool

225 _M_request_stop() noexcept

226 {

227

228 auto __old = _M_value.load(memory_order::acquire);

229 do

230 {

231 if (__old & _S_stop_requested_bit)

232 return false;

233 }

234 while (!_M_try_lock_and_stop(__old));

235

237

238 while (_M_head)

239 {

240 bool __last_cb;

241 _Stop_cb* __cb = _M_head;

242 _M_head = _M_head->_M_next;

243 if (_M_head)

244 {

245 _M_head->_M_prev = nullptr;

246 __last_cb = false;

247 }

248 else

249 __last_cb = true;

250

251

252 _M_unlock();

253

254 bool __destroyed = false;

255 __cb->_M_destroyed = &__destroyed;

256

257

258 __cb->_M_run();

259

260 if (!__destroyed)

261 {

262 __cb->_M_destroyed = nullptr;

263

264

265 if (!__gnu_cxx::__is_single_threaded())

266 __cb->_M_done.release();

267 }

268

269

270 if (__last_cb)

271 return true;

272

273 _M_lock();

274 }

275

276 _M_unlock();

277 return true;

278 }

279

280 [[__gnu__::__nonnull__]]

281 bool

282 _M_register_callback(_Stop_cb* __cb) noexcept

283 {

284 auto __old = _M_value.load(memory_order::acquire);

285 do

286 {

287 if (__old & _S_stop_requested_bit)

288 {

289 __cb->_M_run();

290 return false;

291 }

292

293 if (__old < _S_ssrc_counter_inc)

294

295

296

297 return false;

298 }

299 while (!_M_try_lock(__old));

300

301 __cb->_M_next = _M_head;

302 if (_M_head)

303 {

304 _M_head->_M_prev = __cb;

305 }

306 _M_head = __cb;

307 _M_unlock();

308 return true;

309 }

310

311

312 [[__gnu__::__nonnull__]]

313 void

314 _M_remove_callback(_Stop_cb* __cb)

315 {

316 _M_lock();

317

318 if (__cb == _M_head)

319 {

320 _M_head = _M_head->_M_next;

321 if (_M_head)

322 _M_head->_M_prev = nullptr;

323 _M_unlock();

324 return;

325 }

326 else if (__cb->_M_prev)

327 {

328 __cb->_M_prev->_M_next = __cb->_M_next;

329 if (__cb->_M_next)

330 __cb->_M_next->_M_prev = __cb->_M_prev;

331 _M_unlock();

332 return;

333 }

334

335 _M_unlock();

336

337

338

339

340

341

342

344 {

345

346 __cb->_M_done.acquire();

347

348 return;

349 }

350

351 if (__cb->_M_destroyed)

352 *__cb->_M_destroyed = true;

353 }

354

355

356

357

358

359 bool

360 _M_try_lock(value_type& __curval,

361 memory_order __failure = memory_order::acquire) noexcept

362 {

363 return _M_do_try_lock(__curval, 0, memory_order::acquire, __failure);

364 }

365

366

367

368

369

370

371

372 bool

373 _M_try_lock_and_stop(value_type& __curval) noexcept

374 {

375 return _M_do_try_lock(__curval, _S_stop_requested_bit,

376 memory_order::acq_rel, memory_order::acquire);

377 }

378

379 bool

380 _M_do_try_lock(value_type& __curval, value_type __newbits,

382 {

383 if (__curval & _S_locked_bit)

384 {

385 _S_yield();

386 __curval = _M_value.load(__failure);

387 return false;

388 }

389 __newbits |= _S_locked_bit;

390 return _M_value.compare_exchange_weak(__curval, __curval | __newbits,

391 __success, __failure);

392 }

393 };

394

395 struct _Stop_state_ref

396 {

397 _Stop_state_ref() = default;

398

399 [[__gnu__::__access__(__none__, 2)]]

400 explicit

402 : _M_ptr(new _Stop_state_t())

403 { }

404

405 _Stop_state_ref(const _Stop_state_ref& __other) noexcept

406 : _M_ptr(__other._M_ptr)

407 {

408 if (_M_ptr)

409 _M_ptr->_M_add_owner();

410 }

411

412 _Stop_state_ref(_Stop_state_ref&& __other) noexcept

413 : _M_ptr(__other._M_ptr)

414 {

415 __other._M_ptr = nullptr;

416 }

417

418 _Stop_state_ref&

419 operator=(const _Stop_state_ref& __other) noexcept

420 {

421 if (auto __ptr = __other._M_ptr; __ptr != _M_ptr)

422 {

423 if (__ptr)

424 __ptr->_M_add_owner();

425 if (_M_ptr)

426 _M_ptr->_M_release_ownership();

427 _M_ptr = __ptr;

428 }

429 return *this;

430 }

431

432 _Stop_state_ref&

433 operator=(_Stop_state_ref&& __other) noexcept

434 {

435 _Stop_state_ref(std::move(__other)).swap(*this);

436 return *this;

437 }

438

439 ~_Stop_state_ref()

440 {

441 if (_M_ptr)

442 _M_ptr->_M_release_ownership();

443 }

444

445 void

446 swap(_Stop_state_ref& __other) noexcept

447 { std::swap(_M_ptr, __other._M_ptr); }

448

449 explicit operator bool() const noexcept { return _M_ptr != nullptr; }

450

451 _Stop_state_t* operator->() const noexcept { return _M_ptr; }

452

453#if __cpp_impl_three_way_comparison >= 201907L

454 friend bool

455 operator==(const _Stop_state_ref&, const _Stop_state_ref&) = default;

456#else

457 friend bool

458 operator==(const _Stop_state_ref& __lhs, const _Stop_state_ref& __rhs)

459 noexcept

460 { return __lhs._M_ptr == __rhs._M_ptr; }

461

462 friend bool

463 operator!=(const _Stop_state_ref& __lhs, const _Stop_state_ref& __rhs)

464 noexcept

465 { return __lhs._M_ptr != __rhs._M_ptr; }

466#endif

467

468 private:

469 _Stop_state_t* _M_ptr = nullptr;

470 };

471

472 _Stop_state_ref _M_state;

473

474 explicit

475 stop_token(const _Stop_state_ref& __state) noexcept

476 : _M_state{__state}

477 { }

478 };

479

480

482 {

483 public:

485 { }

486

488 { }

489

491 : _M_state(__other._M_state)

492 {

493 if (_M_state)

494 _M_state->_M_add_ssrc();

495 }

496

498

500 operator=(const stop_source& __other) noexcept

501 {

502 if (_M_state != __other._M_state)

503 {

505 _M_state = __other._M_state;

506 if (_M_state)

507 _M_state->_M_add_ssrc();

508 }

509 return *this;

510 }

511

513 operator=(stop_source&&) noexcept = default;

514

516 {

517 if (_M_state)

518 _M_state->_M_sub_ssrc();

519 }

520

521 [[nodiscard]]

522 bool

523 stop_possible() const noexcept

524 {

525 return static_cast<bool>(_M_state);

526 }

527

528 [[nodiscard]]

529 bool

530 stop_requested() const noexcept

531 {

532 return static_cast<bool>(_M_state) && _M_state->_M_stop_requested();

533 }

534

535 bool

536 request_stop() const noexcept

537 {

538 if (stop_possible())

539 return _M_state->_M_request_stop();

540 return false;

541 }

542

543 [[nodiscard]]

545 get_token() const noexcept

546 {

548 }

549

550 void

552 {

553 _M_state.swap(__other._M_state);

554 }

555

556 [[nodiscard]]

557 friend bool

559 {

560 return __a._M_state == __b._M_state;

561 }

562

563 friend void

565 {

566 __lhs.swap(__rhs);

567 }

568

569 private:

570 stop_token::_Stop_state_ref _M_state;

571 };

572

573

574 template<typename _Callback>

576 {

577 static_assert(is_nothrow_destructible_v<_Callback>);

578 static_assert(is_invocable_v<_Callback>);

579

580 public:

581 using callback_type = _Callback;

582

583 template<typename _Cb,

585 explicit

587 noexcept(is_nothrow_constructible_v<_Callback, _Cb>)

588 : _M_cb(std::forward<_Cb>(__cb))

589 {

590 if (auto __state = __token._M_state)

591 {

592 if (__state->_M_register_callback(&_M_cb))

593 _M_state.swap(__state);

594 }

595 }

596

597 template<typename _Cb,

599 explicit

601 noexcept(is_nothrow_constructible_v<_Callback, _Cb>)

602 : _M_cb(std::forward<_Cb>(__cb))

603 {

604 if (auto& __state = __token._M_state)

605 {

606 if (__state->_M_register_callback(&_M_cb))

607 _M_state.swap(__state);

608 }

609 }

610

612 {

613 if (_M_state)

614 {

615 _M_state->_M_remove_callback(&_M_cb);

616 }

617 }

618

623

624 private:

625 struct _Cb_impl : stop_token::_Stop_cb

626 {

627 template<typename _Cb>

628 explicit

629 _Cb_impl(_Cb&& __cb)

630 : _Stop_cb(&_S_execute),

631 _M_cb(std::forward<_Cb>(__cb))

632 { }

633

634 _Callback _M_cb;

635

636 [[__gnu__::__nonnull__]]

637 static void

638 _S_execute(_Stop_cb* __that) noexcept

639 {

640 _Callback& __cb = static_cast<_Cb_impl*>(__that)->_M_cb;

641 std::forward<_Callback>(__cb)();

642 }

643 };

644

645 _Cb_impl _M_cb;

646 stop_token::_Stop_state_ref _M_state;

647 };

648

649 template<typename _Callback>

651

652_GLIBCXX_END_NAMESPACE_VERSION

653}

654#endif

655#endif

typename enable_if< _Cond, _Tp >::type enable_if_t

Alias template for enable_if.

constexpr std::remove_reference< _Tp >::type && move(_Tp &&__t) noexcept

Convert a value to an rvalue.

memory_order

Enumeration for memory_order.

The top-level namespace for all ISO C++ library entities is std.

void yield() noexcept

Allow the implementation to schedule a different thread.

thread::id get_id() noexcept

The unique identifier of the current thread.

Generic atomic type, primary class template.

Tag type indicating a stop_source should have no shared-stop-state.

Allow testing whether a stop request has been made on a stop_source.

A type that allows a stop request to be made.

A wrapper for callbacks to be run when a stop request is made.