cstdatomic
// -*- C++ -*- header.

// Copyright (C) 2008, 2009
// Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.

/** @file cstdatomic
 *  This is a Standard C++ Library file.  You should @c #include this file
 *  in your programs, rather than any of the "*.h" implementation files.
 *
 *  This is the C++ version of the Standard C Library header @c stdatomic.h,
 *  and its contents are (mostly) the same as that header, but are all
 *  contained in the namespace @c std (except for names which are defined
 *  as macros in C).
 */
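
// Illustrative usage (a sketch, not part of this header): unlike C's
// <stdatomic.h>, the names declared here live in namespace @c std, e.g.
//
//   #include <cstdatomic>
//
//   std::atomic<int> counter(0);
//   void hit() { counter.fetch_add(1, std::memory_order_relaxed); }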

// Based on "C++ Atomic Types and Operations" by Hans Boehm and Lawrence Crowl.
// http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2427.html

#ifndef _GLIBCXX_STDATOMIC
#define _GLIBCXX_STDATOMIC 1

#pragma GCC system_header

#ifndef __GXX_EXPERIMENTAL_CXX0X__
# include <c++0x_warning.h>
#endif

#include <stdatomic.h>
#include <cstddef>

_GLIBCXX_BEGIN_NAMESPACE(std)

  /**
   * @addtogroup atomics
   * @{
   */

  /// kill_dependency
  template<typename _Tp>
    inline _Tp
    kill_dependency(_Tp __y)
    {
      _Tp ret(__y);
      return ret;
    }
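
  // Illustrative usage (a sketch; 'ptr' is a hypothetical std::atomic<int*>
  // in user code, not part of this header): kill_dependency ends the
  // dependency chain started by a memory_order_consume load, so the returned
  // value no longer carries a dependency:
  //
  //   int* p = ptr.load(std::memory_order_consume);
  //   int  i = std::kill_dependency(*p);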

  // Given the success memory order of a single-order compare_exchange call,
  // derive the corresponding failure order: a release order falls back to
  // relaxed and acq_rel falls back to acquire; all other orders are reused.
  inline memory_order
  __calculate_memory_order(memory_order __m)
  {
    const bool __cond1 = __m == memory_order_release;
    const bool __cond2 = __m == memory_order_acq_rel;
    memory_order __mo1(__cond1 ? memory_order_relaxed : __m);
    memory_order __mo2(__cond2 ? memory_order_acquire : __mo1);
    return __mo2;
  }

  //
  // Three nested namespaces for atomic implementation details.
  //
  // The nested namespace inlined into std:: is determined by the value
  // of the _GLIBCXX_ATOMIC_PROPERTY macro and the resulting
  // ATOMIC_*_LOCK_FREE macros.  See file stdatomic.h.
  //
  // 0 == __atomic0 == Never lock-free
  // 1 == __atomic1 == Best available, sometimes lock-free
  // 2 == __atomic2 == Always lock-free
#include <bits/atomic_0.h>
#include <bits/atomic_2.h>
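
  // For example (illustrative only): when the macros from <stdatomic.h>
  // report that the target is always lock-free (value 2), the __atomic2
  // implementation is the one inlined into std:: and is_lock_free() reports
  // true; with value 0, the __atomic0 fallback is selected and the
  // operations are not lock-free.
  //
  //   std::atomic<int> i(0);
  //   bool lf = i.is_lock_free();   // reflects the selected implementation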

  /// atomic
  /// 29.4.3, Generic atomic type, primary class template.
  template<typename _Tp>
    struct atomic
    {
    private:
      _Tp _M_i;

    public:
      atomic() = default;
      ~atomic() = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;

      atomic(_Tp __i) : _M_i(__i) { }

      operator _Tp() const volatile;

      _Tp
      operator=(_Tp __i) volatile { store(__i); return __i; }

      bool
      is_lock_free() const volatile;

      void
      store(_Tp, memory_order = memory_order_seq_cst) volatile;

      _Tp
      load(memory_order = memory_order_seq_cst) const volatile;

      _Tp
      exchange(_Tp __i, memory_order = memory_order_seq_cst) volatile;

      bool
      compare_exchange_weak(_Tp&, _Tp, memory_order, memory_order) volatile;

      bool
      compare_exchange_strong(_Tp&, _Tp, memory_order, memory_order) volatile;

      bool
      compare_exchange_weak(_Tp&, _Tp,
                            memory_order = memory_order_seq_cst) volatile;

      bool
      compare_exchange_strong(_Tp&, _Tp,
                              memory_order = memory_order_seq_cst) volatile;
    };
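
  // Illustrative compare-exchange loop against the interface declared above
  // (a sketch; 'Point' is a hypothetical trivially-copyable user type, and
  // whether the primary template is usable for a given _Tp depends on the
  // implementation headers included above).  On failure,
  // compare_exchange_weak reloads the current value into 'expected':
  //
  //   struct Point { int x, y; };
  //
  //   void move_right(std::atomic<Point>& a)
  //   {
  //     Point expected = a.load();
  //     Point desired;
  //     do
  //       {
  //         desired = expected;
  //         ++desired.x;
  //       }
  //     while (!a.compare_exchange_weak(expected, desired));
  //   }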

  /// Partial specialization for pointer types.
  template<typename _Tp>
    struct atomic<_Tp*> : atomic_address
    {
      atomic() = default;
      ~atomic() = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;

      atomic(_Tp* __v) : atomic_address(__v) { }

      void
      store(_Tp*, memory_order = memory_order_seq_cst) volatile;

      _Tp*
      load(memory_order = memory_order_seq_cst) const volatile;

      _Tp*
      exchange(_Tp*, memory_order = memory_order_seq_cst) volatile;

      bool
      compare_exchange_weak(_Tp*&, _Tp*, memory_order, memory_order) volatile;

      bool
      compare_exchange_strong(_Tp*&, _Tp*, memory_order, memory_order) volatile;

      bool
      compare_exchange_weak(_Tp*&, _Tp*,
                            memory_order = memory_order_seq_cst) volatile;

      bool
      compare_exchange_strong(_Tp*&, _Tp*,
                              memory_order = memory_order_seq_cst) volatile;

      _Tp*
      fetch_add(ptrdiff_t, memory_order = memory_order_seq_cst) volatile;

      _Tp*
      fetch_sub(ptrdiff_t, memory_order = memory_order_seq_cst) volatile;

      operator _Tp*() const volatile
      { return load(); }

      _Tp*
      operator=(_Tp* __v) volatile
      {
        store(__v);
        return __v;
      }

      _Tp*
      operator++(int) volatile { return fetch_add(1); }

      _Tp*
      operator--(int) volatile { return fetch_sub(1); }

      _Tp*
      operator++() volatile { return fetch_add(1) + 1; }

      _Tp*
      operator--() volatile { return fetch_sub(1) - 1; }

      _Tp*
      operator+=(ptrdiff_t __d) volatile
      { return fetch_add(__d) + __d; }

      _Tp*
      operator-=(ptrdiff_t __d) volatile
      { return fetch_sub(__d) - __d; }
    };
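
  // Illustrative usage of the pointer specialization (a sketch): arithmetic
  // is in units of the pointed-to type, exactly as for a raw pointer.
  //
  //   int buf[4] = { 0, 1, 2, 3 };
  //   std::atomic<int*> p(buf);
  //   p.fetch_add(2);      // p now holds buf + 2
  //   int* prev = p++;     // post-increment: returns buf + 2, p holds buf + 3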

  /// Explicit specialization for void*
  template<>
    struct atomic<void*> : public atomic_address
    {
      typedef void* __integral_type;
      typedef atomic_address __base_type;

      atomic() = default;
      ~atomic() = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;

      atomic(__integral_type __i) : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for bool.
  template<>
    struct atomic<bool> : public atomic_bool
    {
      typedef bool __integral_type;
      typedef atomic_bool __base_type;

      atomic() = default;
      ~atomic() = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;

      atomic(__integral_type __i) : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char.
  template<>
    struct atomic<char> : public atomic_char
    {
      typedef char __integral_type;
      typedef atomic_char __base_type;

      atomic() = default;
      ~atomic() = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;

      atomic(__integral_type __i) : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for signed char.
  template<>
    struct atomic<signed char> : public atomic_schar
    {
      typedef signed char __integral_type;
      typedef atomic_schar __base_type;

      atomic() = default;
      ~atomic() = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;

      atomic(__integral_type __i) : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned char.
  template<>
    struct atomic<unsigned char> : public atomic_uchar
    {
      typedef unsigned char __integral_type;
      typedef atomic_uchar __base_type;

      atomic() = default;
      ~atomic() = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;

      atomic(__integral_type __i) : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for short.
  template<>
    struct atomic<short> : public atomic_short
    {
      typedef short __integral_type;
      typedef atomic_short __base_type;

      atomic() = default;
      ~atomic() = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;

      atomic(__integral_type __i) : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned short.
  template<>
    struct atomic<unsigned short> : public atomic_ushort
    {
      typedef unsigned short __integral_type;
      typedef atomic_ushort __base_type;

      atomic() = default;
      ~atomic() = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;

      atomic(__integral_type __i) : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for int.
  template<>
    struct atomic<int> : atomic_int
    {
      typedef int __integral_type;
      typedef atomic_int __base_type;

      atomic() = default;
      ~atomic() = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;

      atomic(__integral_type __i) : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned int.
  template<>
    struct atomic<unsigned int> : public atomic_uint
    {
      typedef unsigned int __integral_type;
      typedef atomic_uint __base_type;

      atomic() = default;
      ~atomic() = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;

      atomic(__integral_type __i) : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for long.
  template<>
    struct atomic<long> : public atomic_long
    {
      typedef long __integral_type;
      typedef atomic_long __base_type;

      atomic() = default;
      ~atomic() = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;

      atomic(__integral_type __i) : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned long.
  template<>
    struct atomic<unsigned long> : public atomic_ulong
    {
      typedef unsigned long __integral_type;
      typedef atomic_ulong __base_type;

      atomic() = default;
      ~atomic() = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;

      atomic(__integral_type __i) : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for long long.
  template<>
    struct atomic<long long> : public atomic_llong
    {
      typedef long long __integral_type;
      typedef atomic_llong __base_type;

      atomic() = default;
      ~atomic() = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;

      atomic(__integral_type __i) : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for unsigned long long.
  template<>
    struct atomic<unsigned long long> : public atomic_ullong
    {
      typedef unsigned long long __integral_type;
      typedef atomic_ullong __base_type;

      atomic() = default;
      ~atomic() = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;

      atomic(__integral_type __i) : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for wchar_t.
  template<>
    struct atomic<wchar_t> : public atomic_wchar_t
    {
      typedef wchar_t __integral_type;
      typedef atomic_wchar_t __base_type;

      atomic() = default;
      ~atomic() = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;

      atomic(__integral_type __i) : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char16_t.
  template<>
    struct atomic<char16_t> : public atomic_char16_t
    {
      typedef char16_t __integral_type;
      typedef atomic_char16_t __base_type;

      atomic() = default;
      ~atomic() = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;

      atomic(__integral_type __i) : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  /// Explicit specialization for char32_t.
  template<>
    struct atomic<char32_t> : public atomic_char32_t
    {
      typedef char32_t __integral_type;
      typedef atomic_char32_t __base_type;

      atomic() = default;
      ~atomic() = default;
      atomic(const atomic&) = delete;
      atomic& operator=(const atomic&) = delete;

      atomic(__integral_type __i) : __base_type(__i) { }

      using __base_type::operator __integral_type;
      using __base_type::operator=;
    };

  template<typename _Tp>
    _Tp*
    atomic<_Tp*>::load(memory_order __m) const volatile
    { return static_cast<_Tp*>(atomic_address::load(__m)); }

  template<typename _Tp>
    _Tp*
    atomic<_Tp*>::exchange(_Tp* __v, memory_order __m) volatile
    { return static_cast<_Tp*>(atomic_address::exchange(__v, __m)); }

  template<typename _Tp>
    bool
    atomic<_Tp*>::compare_exchange_weak(_Tp*& __r, _Tp* __v, memory_order __m1,
                                        memory_order __m2) volatile
    {
      void** __vr = reinterpret_cast<void**>(&__r);
      void* __vv = static_cast<void*>(__v);
      return atomic_address::compare_exchange_weak(*__vr, __vv, __m1, __m2);
    }

  template<typename _Tp>
    bool
    atomic<_Tp*>::compare_exchange_strong(_Tp*& __r, _Tp* __v,
                                          memory_order __m1,
                                          memory_order __m2) volatile
    {
      void** __vr = reinterpret_cast<void**>(&__r);
      void* __vv = static_cast<void*>(__v);
      return atomic_address::compare_exchange_strong(*__vr, __vv, __m1, __m2);
    }

  template<typename _Tp>
    bool
    atomic<_Tp*>::compare_exchange_weak(_Tp*& __r, _Tp* __v,
                                        memory_order __m) volatile
    {
      return compare_exchange_weak(__r, __v, __m,
                                   __calculate_memory_order(__m));
    }

  template<typename _Tp>
    bool
    atomic<_Tp*>::compare_exchange_strong(_Tp*& __r, _Tp* __v,
                                          memory_order __m) volatile
    {
      return compare_exchange_strong(__r, __v, __m,
                                     __calculate_memory_order(__m));
    }

  template<typename _Tp>
    _Tp*
    atomic<_Tp*>::fetch_add(ptrdiff_t __d, memory_order __m) volatile
    {
      // The underlying atomic_address works in raw bytes, so scale the
      // element count by the size of the pointed-to type.
      void* __p = atomic_fetch_add_explicit(this, sizeof(_Tp) * __d, __m);
      return static_cast<_Tp*>(__p);
    }

  template<typename _Tp>
    _Tp*
    atomic<_Tp*>::fetch_sub(ptrdiff_t __d, memory_order __m) volatile
    {
      void* __p = atomic_fetch_sub_explicit(this, sizeof(_Tp) * __d, __m);
      return static_cast<_Tp*>(__p);
    }

  // Convenience function definitions, atomic_flag.
  inline bool
  atomic_flag_test_and_set_explicit(volatile atomic_flag* __a, memory_order __m)
  { return __a->test_and_set(__m); }

  inline void
  atomic_flag_clear_explicit(volatile atomic_flag* __a, memory_order __m)
  { return __a->clear(__m); }
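
  // Illustrative usage (a sketch, assuming the ATOMIC_FLAG_INIT macro from
  // <stdatomic.h>): atomic_flag as a simple spin lock.
  //
  //   std::atomic_flag lock = ATOMIC_FLAG_INIT;
  //
  //   void enter()
  //   {
  //     while (std::atomic_flag_test_and_set_explicit(&lock,
  //                                                   std::memory_order_acquire))
  //       { }
  //   }
  //
  //   void leave()
  //   { std::atomic_flag_clear_explicit(&lock, std::memory_order_release); }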

  // Convenience function definitions, atomic_address.
  inline bool
  atomic_is_lock_free(const volatile atomic_address* __a)
  { return __a->is_lock_free(); }

  inline void
  atomic_store(volatile atomic_address* __a, void* __v)
  { __a->store(__v); }

  inline void
  atomic_store_explicit(volatile atomic_address* __a, void* __v,
                        memory_order __m)
  { __a->store(__v, __m); }

  inline void*
  atomic_load(const volatile atomic_address* __a)
  { return __a->load(); }

  inline void*
  atomic_load_explicit(const volatile atomic_address* __a, memory_order __m)
  { return __a->load(__m); }

  inline void*
  atomic_exchange(volatile atomic_address* __a, void* __v)
  { return __a->exchange(__v); }

  inline void*
  atomic_exchange_explicit(volatile atomic_address* __a, void* __v,
                           memory_order __m)
  { return __a->exchange(__v, __m); }

  inline bool
  atomic_compare_exchange_weak(volatile atomic_address* __a,
                               void** __v1, void* __v2)
  {
    return __a->compare_exchange_weak(*__v1, __v2, memory_order_seq_cst,
                                      memory_order_seq_cst);
  }

  inline bool
  atomic_compare_exchange_strong(volatile atomic_address* __a,
                                 void** __v1, void* __v2)
  {
    return __a->compare_exchange_strong(*__v1, __v2, memory_order_seq_cst,
                                        memory_order_seq_cst);
  }

  inline bool
  atomic_compare_exchange_weak_explicit(volatile atomic_address* __a,
                                        void** __v1, void* __v2,
                                        memory_order __m1, memory_order __m2)
  { return __a->compare_exchange_weak(*__v1, __v2, __m1, __m2); }

  inline bool
  atomic_compare_exchange_strong_explicit(volatile atomic_address* __a,
                                          void** __v1, void* __v2,
                                          memory_order __m1, memory_order __m2)
  { return __a->compare_exchange_strong(*__v1, __v2, __m1, __m2); }

  inline void*
  atomic_fetch_add_explicit(volatile atomic_address* __a, ptrdiff_t __d,
                            memory_order __m)
  { return __a->fetch_add(__d, __m); }

  inline void*
  atomic_fetch_add(volatile atomic_address* __a, ptrdiff_t __d)
  { return __a->fetch_add(__d); }

  inline void*
  atomic_fetch_sub_explicit(volatile atomic_address* __a, ptrdiff_t __d,
                            memory_order __m)
  { return __a->fetch_sub(__d, __m); }

  inline void*
  atomic_fetch_sub(volatile atomic_address* __a, ptrdiff_t __d)
  { return __a->fetch_sub(__d); }
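
  // Illustrative usage of the atomic_address free functions (a sketch,
  // assuming atomic_address from <stdatomic.h> can be constructed from a
  // void* value, as the atomic<_Tp*> specialization above relies on):
  // claim a previously null slot exactly once.
  //
  //   std::atomic_address slot(static_cast<void*>(0));
  //
  //   bool claim(void* payload)
  //   {
  //     void* expected = 0;
  //     return std::atomic_compare_exchange_strong(&slot, &expected, payload);
  //   }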

  // Convenience function definitions, atomic_bool.
  inline bool
  atomic_is_lock_free(const volatile atomic_bool* __a)
  { return __a->is_lock_free(); }

  inline void
  atomic_store(volatile atomic_bool* __a, bool __i)
  { __a->store(__i); }

  inline void
  atomic_store_explicit(volatile atomic_bool* __a, bool __i, memory_order __m)
  { __a->store(__i, __m); }

  inline bool
  atomic_load(const volatile atomic_bool* __a)
  { return __a->load(); }

  inline bool
  atomic_load_explicit(const volatile atomic_bool* __a, memory_order __m)
  { return __a->load(__m); }

  inline bool
  atomic_exchange(volatile atomic_bool* __a, bool __i)
  { return __a->exchange(__i); }

  inline bool
  atomic_exchange_explicit(volatile atomic_bool* __a, bool __i,
                           memory_order __m)
  { return __a->exchange(__i, __m); }

  inline bool
  atomic_compare_exchange_weak(volatile atomic_bool* __a, bool* __i1, bool __i2)
  {
    return __a->compare_exchange_weak(*__i1, __i2, memory_order_seq_cst,
                                      memory_order_seq_cst);
  }

  inline bool
  atomic_compare_exchange_strong(volatile atomic_bool* __a,
                                 bool* __i1, bool __i2)
  {
    return __a->compare_exchange_strong(*__i1, __i2, memory_order_seq_cst,
                                        memory_order_seq_cst);
  }

  inline bool
  atomic_compare_exchange_weak_explicit(volatile atomic_bool* __a, bool* __i1,
                                        bool __i2, memory_order __m1,
                                        memory_order __m2)
  { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  inline bool
  atomic_compare_exchange_strong_explicit(volatile atomic_bool* __a,
                                          bool* __i1, bool __i2,
                                          memory_order __m1, memory_order __m2)
  { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  // Free-standing functions.  The template argument should be constrained
  // to integral types, as specified in the standard.
  template<typename _ITp>
    inline void
    atomic_store_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                          memory_order __m)
    { __a->store(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_load_explicit(const volatile __atomic_base<_ITp>* __a,
                         memory_order __m)
    { return __a->load(__m); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange_explicit(volatile __atomic_base<_ITp>* __a,
                             _ITp __i, memory_order __m)
    { return __a->exchange(__i, __m); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak_explicit(volatile __atomic_base<_ITp>* __a,
                                          _ITp* __i1, _ITp __i2,
                                          memory_order __m1, memory_order __m2)
    { return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong_explicit(volatile __atomic_base<_ITp>* __a,
                                            _ITp* __i1, _ITp __i2,
                                            memory_order __m1,
                                            memory_order __m2)
    { return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m)
    { return __a->fetch_add(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m)
    { return __a->fetch_sub(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m)
    { return __a->fetch_and(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                             memory_order __m)
    { return __a->fetch_or(__i, __m); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
                              memory_order __m)
    { return __a->fetch_xor(__i, __m); }

  template<typename _ITp>
    inline bool
    atomic_is_lock_free(const volatile __atomic_base<_ITp>* __a)
    { return __a->is_lock_free(); }

  template<typename _ITp>
    inline void
    atomic_store(volatile __atomic_base<_ITp>* __a, _ITp __i)
    { atomic_store_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_load(const volatile __atomic_base<_ITp>* __a)
    { return atomic_load_explicit(__a, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_exchange(volatile __atomic_base<_ITp>* __a, _ITp __i)
    { return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_weak(volatile __atomic_base<_ITp>* __a,
                                 _ITp* __i1, _ITp __i2)
    {
      return atomic_compare_exchange_weak_explicit(__a, __i1, __i2,
                                                   memory_order_seq_cst,
                                                   memory_order_seq_cst);
    }

  template<typename _ITp>
    inline bool
    atomic_compare_exchange_strong(volatile __atomic_base<_ITp>* __a,
                                   _ITp* __i1, _ITp __i2)
    {
      return atomic_compare_exchange_strong_explicit(__a, __i1, __i2,
                                                     memory_order_seq_cst,
                                                     memory_order_seq_cst);
    }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_add(volatile __atomic_base<_ITp>* __a, _ITp __i)
    { return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_sub(volatile __atomic_base<_ITp>* __a, _ITp __i)
    { return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_and(volatile __atomic_base<_ITp>* __a, _ITp __i)
    { return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_or(volatile __atomic_base<_ITp>* __a, _ITp __i)
    { return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }

  template<typename _ITp>
    inline _ITp
    atomic_fetch_xor(volatile __atomic_base<_ITp>* __a, _ITp __i)
    { return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
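
  // Illustrative usage of the free functions on an integral atomic type (a
  // sketch, assuming atomic_uint from <stdatomic.h> is implemented in terms
  // of __atomic_base<unsigned int>, as the overloads above expect):
  //
  //   std::atomic_uint hits(0u);
  //
  //   void record()
  //   { std::atomic_fetch_add(&hits, 1u); }
  //
  //   unsigned snapshot()
  //   { return std::atomic_load_explicit(&hits, std::memory_order_acquire); }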

  // @} group atomics

_GLIBCXX_END_NAMESPACE

#endif