extra_ops_gcc_x86.hpp 67 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
7177817791780178117821783178417851786178717881789179017911792179317941795179617971798179918001801180218031804180518061807180818091810181118121813181418151816181718181819182018211822182318241825182618271828182918301831183218331834183518361837183818391840184118421843184418451846184718481849185018511852185318541855185618571858185918601861186218631864186518661867186818691870187118721873187418751876187718781879188018811882188318841885188618871888188918901891189218931894189518961897189818991900190119021903190419051906190719081909191019111912191319141915191619171918191919201921192219231924192519261927192819291930193119321933193419351936193719381939194019411942194319441945194619471948194919501951195219531954195519561957
  1. /*
  2. * Distributed under the Boost Software License, Version 1.0.
  3. * (See accompanying file LICENSE_1_0.txt or copy at
  4. * http://www.boost.org/LICENSE_1_0.txt)
  5. *
  6. * Copyright (c) 2015-2025 Andrey Semashev
  7. */
  8. /*!
  9. * \file atomic/detail/extra_ops_gcc_x86.hpp
  10. *
  11. * This header contains implementation of the extra atomic operations for x86.
  12. */
  13. #ifndef BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_X86_HPP_INCLUDED_
  14. #define BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_X86_HPP_INCLUDED_
  15. #include <cstddef>
  16. #include <cstdint>
  17. #include <boost/memory_order.hpp>
  18. #include <boost/atomic/detail/config.hpp>
  19. #include <boost/atomic/detail/storage_traits.hpp>
  20. #include <boost/atomic/detail/extra_operations_fwd.hpp>
  21. #include <boost/atomic/detail/extra_ops_generic.hpp>
  22. #include <boost/atomic/detail/header.hpp>
  23. #ifdef BOOST_HAS_PRAGMA_ONCE
  24. #pragma once
  25. #endif
  26. namespace boost {
  27. namespace atomics {
  28. namespace detail {
/*!
 * Specialization of the extra atomic operations for 8-bit (1-byte) storage on
 * x86, implemented with gcc-style inline assembly. Operations not overridden
 * here are inherited from extra_operations_generic.
 *
 * NOTE(review): BOOST_ATOMIC_DETAIL_TSAN_RELEASE/ACQUIRE are presumably
 * ThreadSanitizer annotation hooks (no-ops otherwise) — confirm in config.hpp.
 */
template< typename Base, bool Signed >
struct extra_operations< Base, 1u, Signed, true > :
    public extra_operations_generic< Base, 1u, Signed >
{
    using base_type = extra_operations_generic< Base, 1u, Signed >;
    using storage_type = typename base_type::storage_type;
    // 32-bit temporary: the byte operations act on the low byte (%b2) of a
    // 32-bit general-purpose register allocated via the "q" constraint.
    using temp_storage_type = typename storage_traits< 4u >::type;

// CAS loop for unary ops (negb/notb):
//  - `original` is tied to %al ("+a"); movzbl copies it zero-extended into the
//    temporary register (%2), `op` is applied to its low byte, and
//    "lock cmpxchgb" publishes the new byte if storage still equals %al.
//  - On failure cmpxchg reloads %al with the fresh value and "jne 1b" retries.
//  - On exit `original` holds the pre-op value, `result` the post-op value.
//  - cmpxchg and negb write EFLAGS, hence the CC clobber.
#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, original, result)\
    __asm__ __volatile__\
    (\
        ".align 16\n\t"\
        "1: movzbl %[orig], %2\n\t"\
        op " %b2\n\t"\
        "lock; cmpxchgb %b2, %[storage]\n\t"\
        "jne 1b"\
        : [orig] "+a" (original), [storage] "+m" (storage), "=&q" (result)\
        : \
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
    )

    //! Atomically negates the value; returns the value *before* negation.
    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type original = storage; // initial guess for the CAS loop
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("negb", original, result);
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return original;
    }

    //! Atomically complements (bitwise NOT) the value; returns the value *before*.
    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("notb", original, result);
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return original;
    }

    //! Atomically negates the value; returns the *new* (negated) value.
    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("negb", original, result);
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return static_cast< storage_type >(result);
    }

    //! Atomically complements the value; returns the *new* (complemented) value.
    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("notb", original, result);
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return static_cast< storage_type >(result);
    }

#undef BOOST_ATOMIC_DETAIL_CAS_LOOP

// CAS loop for binary ops (andb/orb/xorb):
//  - The argument is first copied into the temporary (%2); `op` then combines
//    the current value (%al, tied to `original`) into its low byte, and
//    "lock cmpxchgb" publishes the combined byte, retrying on failure.
//  - On exit `original` holds the old value, `result` the new value.
#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, argument, original, result)\
    __asm__ __volatile__\
    (\
        ".align 16\n\t"\
        "1: mov %[arg], %2\n\t"\
        op " %%al, %b2\n\t"\
        "lock; cmpxchgb %b2, %[storage]\n\t"\
        "jne 1b"\
        : [orig] "+a" (original), [storage] "+m" (storage), "=&q" (result)\
        : [arg] "ir" ((temp_storage_type)argument)\
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
    )

    //! Atomic AND; returns the *new* value (unlike the base fetch_and).
    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("andb", v, original, result);
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return static_cast< storage_type >(result);
    }

    //! Atomic OR; returns the *new* value.
    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("orb", v, original, result);
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return static_cast< storage_type >(result);
    }

    //! Atomic XOR; returns the *new* value.
    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("xorb", v, original, result);
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return static_cast< storage_type >(result);
    }

#undef BOOST_ATOMIC_DETAIL_CAS_LOOP

    //! Atomically negates; returns true iff the resulting value is nonzero.
    static BOOST_FORCEINLINE bool negate_and_test(storage_type volatile& storage, memory_order order) noexcept
    {
        return !!negate(storage, order);
    }

    //! Atomically complements; returns true iff the resulting value is nonzero.
    static BOOST_FORCEINLINE bool complement_and_test(storage_type volatile& storage, memory_order order) noexcept
    {
        return !!bitwise_complement(storage, order);
    }

    //! Atomic add with no result observed; uses "lock inc" when v is the
    //! compile-time constant 1, otherwise "lock add".
    static BOOST_FORCEINLINE void opaque_add(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incb %[storage]\n\t"
                : [storage] "+m" (storage)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addb %[argument], %[storage]\n\t"
                : [storage] "+m" (storage)
                : [argument] "iq" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    }

    //! Atomic subtract with no result observed; "lock dec" for constant 1.
    static BOOST_FORCEINLINE void opaque_sub(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decb %[storage]\n\t"
                : [storage] "+m" (storage)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subb %[argument], %[storage]\n\t"
                : [storage] "+m" (storage)
                : [argument] "iq" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    }

    //! Atomic negation with no result observed (single "lock neg", no CAS loop).
    static BOOST_FORCEINLINE void opaque_negate(storage_type volatile& storage, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        __asm__ __volatile__
        (
            "lock; negb %[storage]\n\t"
            : [storage] "+m" (storage)
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    }

    //! Atomic AND with no result observed.
    static BOOST_FORCEINLINE void opaque_and(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        __asm__ __volatile__
        (
            "lock; andb %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    }

    //! Atomic OR with no result observed.
    static BOOST_FORCEINLINE void opaque_or(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        __asm__ __volatile__
        (
            "lock; orb %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    }

    //! Atomic XOR with no result observed.
    static BOOST_FORCEINLINE void opaque_xor(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        __asm__ __volatile__
        (
            "lock; xorb %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    }

    //! Atomic complement with no result observed. Note: no CC clobber here —
    //! x86 NOT does not modify EFLAGS (unlike NEG), per the ISA reference.
    static BOOST_FORCEINLINE void opaque_complement(storage_type volatile& storage, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        __asm__ __volatile__
        (
            "lock; notb %[storage]\n\t"
            : [storage] "+m" (storage)
            :
            : "memory"
        );
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    }

    //! Atomic add; returns true iff the *result* is nonzero. Reads the result
    //! directly from ZF, either via the "=@ccnz" flag-output constraint (no CC
    //! clobber needed — flags are the output) or via a setnz fallback.
    static BOOST_FORCEINLINE bool add_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incb %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                :
                : "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addb %[argument], %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                : [argument] "iq" (v)
                : "memory"
            );
        }
#else
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incb %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addb %[argument], %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                : [argument] "iq" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
#endif
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return res;
    }

    //! Atomic subtract; returns true iff the *result* is nonzero.
    static BOOST_FORCEINLINE bool sub_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decb %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                :
                : "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subb %[argument], %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                : [argument] "iq" (v)
                : "memory"
            );
        }
#else
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decb %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subb %[argument], %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                : [argument] "iq" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
#endif
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return res;
    }

    //! Atomic AND; returns true iff the *result* is nonzero (from ZF).
    static BOOST_FORCEINLINE bool and_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; andb %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "iq" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; andb %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return res;
    }

    //! Atomic OR; returns true iff the *result* is nonzero (from ZF).
    static BOOST_FORCEINLINE bool or_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; orb %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "iq" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; orb %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return res;
    }

    //! Atomic XOR; returns true iff the *result* is nonzero (from ZF).
    static BOOST_FORCEINLINE bool xor_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; xorb %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "iq" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; xorb %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return res;
    }
};
  423. template< typename Base, bool Signed >
  424. struct extra_operations< Base, 2u, Signed, true > :
  425. public extra_operations_generic< Base, 2u, Signed >
  426. {
  427. using base_type = extra_operations_generic< Base, 2u, Signed >;
  428. using storage_type = typename base_type::storage_type;
  429. using temp_storage_type = typename storage_traits< 4u >::type;
  430. #define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, original, result)\
  431. __asm__ __volatile__\
  432. (\
  433. ".align 16\n\t"\
  434. "1: movzwl %[orig], %2\n\t"\
  435. op " %w2\n\t"\
  436. "lock; cmpxchgw %w2, %[storage]\n\t"\
  437. "jne 1b"\
  438. : [orig] "+a" (original), [storage] "+m" (storage), "=&q" (result)\
  439. : \
  440. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
  441. )
  442. static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) noexcept
  443. {
  444. BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
  445. storage_type original = storage;
  446. temp_storage_type result;
  447. BOOST_ATOMIC_DETAIL_CAS_LOOP("negw", original, result);
  448. BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
  449. return original;
  450. }
  451. static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) noexcept
  452. {
  453. BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
  454. storage_type original = storage;
  455. temp_storage_type result;
  456. BOOST_ATOMIC_DETAIL_CAS_LOOP("notw", original, result);
  457. BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
  458. return original;
  459. }
  460. static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) noexcept
  461. {
  462. BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
  463. storage_type original = storage;
  464. temp_storage_type result;
  465. BOOST_ATOMIC_DETAIL_CAS_LOOP("negw", original, result);
  466. BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
  467. return static_cast< storage_type >(result);
  468. }
  469. static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) noexcept
  470. {
  471. BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
  472. storage_type original = storage;
  473. temp_storage_type result;
  474. BOOST_ATOMIC_DETAIL_CAS_LOOP("notw", original, result);
  475. BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
  476. return static_cast< storage_type >(result);
  477. }
  478. #undef BOOST_ATOMIC_DETAIL_CAS_LOOP
  479. #define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, argument, original, result)\
  480. __asm__ __volatile__\
  481. (\
  482. ".align 16\n\t"\
  483. "1: mov %[arg], %2\n\t"\
  484. op " %%ax, %w2\n\t"\
  485. "lock; cmpxchgw %w2, %[storage]\n\t"\
  486. "jne 1b"\
  487. : [orig] "+a" (original), [storage] "+m" (storage), "=&q" (result)\
  488. : [arg] "ir" ((temp_storage_type)argument)\
  489. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
  490. )
  491. static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) noexcept
  492. {
  493. BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
  494. storage_type original = storage;
  495. temp_storage_type result;
  496. BOOST_ATOMIC_DETAIL_CAS_LOOP("andw", v, original, result);
  497. BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
  498. return static_cast< storage_type >(result);
  499. }
  500. static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) noexcept
  501. {
  502. BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
  503. storage_type original = storage;
  504. temp_storage_type result;
  505. BOOST_ATOMIC_DETAIL_CAS_LOOP("orw", v, original, result);
  506. BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
  507. return static_cast< storage_type >(result);
  508. }
  509. static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) noexcept
  510. {
  511. BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
  512. storage_type original = storage;
  513. temp_storage_type result;
  514. BOOST_ATOMIC_DETAIL_CAS_LOOP("xorw", v, original, result);
  515. BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
  516. return static_cast< storage_type >(result);
  517. }
  518. #undef BOOST_ATOMIC_DETAIL_CAS_LOOP
  519. static BOOST_FORCEINLINE bool negate_and_test(storage_type volatile& storage, memory_order order) noexcept
  520. {
  521. return !!negate(storage, order);
  522. }
  523. static BOOST_FORCEINLINE bool complement_and_test(storage_type volatile& storage, memory_order order) noexcept
  524. {
  525. return !!bitwise_complement(storage, order);
  526. }
  527. static BOOST_FORCEINLINE void opaque_add(storage_type volatile& storage, storage_type v, memory_order order) noexcept
  528. {
  529. BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
  530. if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
  531. {
  532. __asm__ __volatile__
  533. (
  534. "lock; incw %[storage]\n\t"
  535. : [storage] "+m" (storage)
  536. :
  537. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  538. );
  539. }
  540. else
  541. {
  542. __asm__ __volatile__
  543. (
  544. "lock; addw %[argument], %[storage]\n\t"
  545. : [storage] "+m" (storage)
  546. : [argument] "iq" (v)
  547. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  548. );
  549. }
  550. BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
  551. }
  552. static BOOST_FORCEINLINE void opaque_sub(storage_type volatile& storage, storage_type v, memory_order order) noexcept
  553. {
  554. BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
  555. if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
  556. {
  557. __asm__ __volatile__
  558. (
  559. "lock; decw %[storage]\n\t"
  560. : [storage] "+m" (storage)
  561. :
  562. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  563. );
  564. }
  565. else
  566. {
  567. __asm__ __volatile__
  568. (
  569. "lock; subw %[argument], %[storage]\n\t"
  570. : [storage] "+m" (storage)
  571. : [argument] "iq" (v)
  572. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  573. );
  574. }
  575. BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
  576. }
  577. static BOOST_FORCEINLINE void opaque_negate(storage_type volatile& storage, memory_order order) noexcept
  578. {
  579. BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
  580. __asm__ __volatile__
  581. (
  582. "lock; negw %[storage]\n\t"
  583. : [storage] "+m" (storage)
  584. :
  585. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  586. );
  587. BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
  588. }
  589. static BOOST_FORCEINLINE void opaque_and(storage_type volatile& storage, storage_type v, memory_order order) noexcept
  590. {
  591. BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
  592. __asm__ __volatile__
  593. (
  594. "lock; andw %[argument], %[storage]\n\t"
  595. : [storage] "+m" (storage)
  596. : [argument] "iq" (v)
  597. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  598. );
  599. BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
  600. }
  601. static BOOST_FORCEINLINE void opaque_or(storage_type volatile& storage, storage_type v, memory_order order) noexcept
  602. {
  603. BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
  604. __asm__ __volatile__
  605. (
  606. "lock; orw %[argument], %[storage]\n\t"
  607. : [storage] "+m" (storage)
  608. : [argument] "iq" (v)
  609. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  610. );
  611. BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
  612. }
  613. static BOOST_FORCEINLINE void opaque_xor(storage_type volatile& storage, storage_type v, memory_order order) noexcept
  614. {
  615. BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
  616. __asm__ __volatile__
  617. (
  618. "lock; xorw %[argument], %[storage]\n\t"
  619. : [storage] "+m" (storage)
  620. : [argument] "iq" (v)
  621. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  622. );
  623. BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
  624. }
  625. static BOOST_FORCEINLINE void opaque_complement(storage_type volatile& storage, memory_order order) noexcept
  626. {
  627. BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
  628. __asm__ __volatile__
  629. (
  630. "lock; notw %[storage]\n\t"
  631. : [storage] "+m" (storage)
  632. :
  633. : "memory"
  634. );
  635. BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
  636. }
  637. static BOOST_FORCEINLINE bool add_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept
  638. {
  639. BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
  640. bool res;
  641. #if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
  642. if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
  643. {
  644. __asm__ __volatile__
  645. (
  646. "lock; incw %[storage]\n\t"
  647. : [storage] "+m" (storage), [result] "=@ccnz" (res)
  648. :
  649. : "memory"
  650. );
  651. }
  652. else
  653. {
  654. __asm__ __volatile__
  655. (
  656. "lock; addw %[argument], %[storage]\n\t"
  657. : [storage] "+m" (storage), [result] "=@ccnz" (res)
  658. : [argument] "iq" (v)
  659. : "memory"
  660. );
  661. }
  662. #else
  663. if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
  664. {
  665. __asm__ __volatile__
  666. (
  667. "lock; incw %[storage]\n\t"
  668. "setnz %[result]\n\t"
  669. : [storage] "+m" (storage), [result] "=q" (res)
  670. :
  671. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  672. );
  673. }
  674. else
  675. {
  676. __asm__ __volatile__
  677. (
  678. "lock; addw %[argument], %[storage]\n\t"
  679. "setnz %[result]\n\t"
  680. : [storage] "+m" (storage), [result] "=q" (res)
  681. : [argument] "iq" (v)
  682. : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
  683. );
  684. }
  685. #endif
  686. BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
  687. return res;
  688. }
//! Atomically subtracts \a v from the 16-bit \a storage and returns whether the resulting value is non-zero.
static BOOST_FORCEINLINE bool sub_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept
{
    BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
    bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
    // Flag output operands available: ZF is read directly from the locked instruction.
    if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
    {
        // Compile-time constant 1: use the shorter "dec" encoding.
        __asm__ __volatile__
        (
            "lock; decw %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            :
            : "memory"
        );
    }
    else
    {
        __asm__ __volatile__
        (
            "lock; subw %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "iq" (v)
            : "memory"
        );
    }
#else
    // No flag outputs: capture ZF with setnz.
    if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
    {
        __asm__ __volatile__
        (
            "lock; decw %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }
    else
    {
        __asm__ __volatile__
        (
            "lock; subw %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }
#endif
    BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    return res;
}
//! Atomically ANDs \a v into the 16-bit \a storage and returns whether the resulting value is non-zero.
static BOOST_FORCEINLINE bool and_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept
{
    BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
    bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
    // ZF is taken directly from the locked "and" via the flag output operand.
    __asm__ __volatile__
    (
        "lock; andw %[argument], %[storage]\n\t"
        : [storage] "+m" (storage), [result] "=@ccnz" (res)
        : [argument] "iq" (v)
        : "memory"
    );
#else
    // Fallback: capture ZF with setnz.
    __asm__ __volatile__
    (
        "lock; andw %[argument], %[storage]\n\t"
        "setnz %[result]\n\t"
        : [storage] "+m" (storage), [result] "=q" (res)
        : [argument] "iq" (v)
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
    );
#endif
    BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    return res;
}
//! Atomically ORs \a v into the 16-bit \a storage and returns whether the resulting value is non-zero.
static BOOST_FORCEINLINE bool or_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept
{
    BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
    bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
    // ZF is taken directly from the locked "or" via the flag output operand.
    __asm__ __volatile__
    (
        "lock; orw %[argument], %[storage]\n\t"
        : [storage] "+m" (storage), [result] "=@ccnz" (res)
        : [argument] "iq" (v)
        : "memory"
    );
#else
    // Fallback: capture ZF with setnz.
    __asm__ __volatile__
    (
        "lock; orw %[argument], %[storage]\n\t"
        "setnz %[result]\n\t"
        : [storage] "+m" (storage), [result] "=q" (res)
        : [argument] "iq" (v)
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
    );
#endif
    BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    return res;
}
//! Atomically XORs \a v into the 16-bit \a storage and returns whether the resulting value is non-zero.
static BOOST_FORCEINLINE bool xor_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept
{
    BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
    bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
    // ZF is taken directly from the locked "xor" via the flag output operand.
    __asm__ __volatile__
    (
        "lock; xorw %[argument], %[storage]\n\t"
        : [storage] "+m" (storage), [result] "=@ccnz" (res)
        : [argument] "iq" (v)
        : "memory"
    );
#else
    // Fallback: capture ZF with setnz.
    __asm__ __volatile__
    (
        "lock; xorw %[argument], %[storage]\n\t"
        "setnz %[result]\n\t"
        : [storage] "+m" (storage), [result] "=q" (res)
        : [argument] "iq" (v)
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
    );
#endif
    BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    return res;
}
//! Atomically sets bit \a bit_number in the 16-bit \a storage and returns the bit's previous value.
//! bts reports the old bit in CF, hence "=@ccc"/setc rather than the ZF used by the arithmetic tests.
static BOOST_FORCEINLINE bool bit_test_and_set(storage_type volatile& storage, unsigned int bit_number, memory_order order) noexcept
{
    BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
    bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
    __asm__ __volatile__
    (
        "lock; btsw %[bit_number], %[storage]\n\t"
        : [storage] "+m" (storage), [result] "=@ccc" (res)
        : [bit_number] "Kq" ((std::uint16_t)bit_number)  // "K": imm8; "q": byte-addressable register
        : "memory"
    );
#else
    __asm__ __volatile__
    (
        "lock; btsw %[bit_number], %[storage]\n\t"
        "setc %[result]\n\t"
        : [storage] "+m" (storage), [result] "=q" (res)
        : [bit_number] "Kq" ((std::uint16_t)bit_number)
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
    );
#endif
    BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    return res;
}
//! Atomically clears bit \a bit_number in the 16-bit \a storage and returns the bit's previous value (from CF).
static BOOST_FORCEINLINE bool bit_test_and_reset(storage_type volatile& storage, unsigned int bit_number, memory_order order) noexcept
{
    BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
    bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
    __asm__ __volatile__
    (
        "lock; btrw %[bit_number], %[storage]\n\t"
        : [storage] "+m" (storage), [result] "=@ccc" (res)
        : [bit_number] "Kq" ((std::uint16_t)bit_number)
        : "memory"
    );
#else
    __asm__ __volatile__
    (
        "lock; btrw %[bit_number], %[storage]\n\t"
        "setc %[result]\n\t"
        : [storage] "+m" (storage), [result] "=q" (res)
        : [bit_number] "Kq" ((std::uint16_t)bit_number)
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
    );
#endif
    BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    return res;
}
//! Atomically toggles bit \a bit_number in the 16-bit \a storage and returns the bit's previous value (from CF).
static BOOST_FORCEINLINE bool bit_test_and_complement(storage_type volatile& storage, unsigned int bit_number, memory_order order) noexcept
{
    BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
    bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
    __asm__ __volatile__
    (
        "lock; btcw %[bit_number], %[storage]\n\t"
        : [storage] "+m" (storage), [result] "=@ccc" (res)
        : [bit_number] "Kq" ((std::uint16_t)bit_number)
        : "memory"
    );
#else
    __asm__ __volatile__
    (
        "lock; btcw %[bit_number], %[storage]\n\t"
        "setc %[result]\n\t"
        : [storage] "+m" (storage), [result] "=q" (res)
        : [bit_number] "Kq" ((std::uint16_t)bit_number)
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
    );
#endif
    BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    return res;
}
  891. };
//! Extra atomic operations for 4-byte storage, implemented with x86 lock-prefixed
//! instructions. The memory_order argument only drives the TSAN annotation macros;
//! the lock prefix itself provides the hardware memory ordering.
template< typename Base, bool Signed >
struct extra_operations< Base, 4u, Signed, true > :
    public extra_operations_generic< Base, 4u, Signed >
{
    using base_type = extra_operations_generic< Base, 4u, Signed >;
    using storage_type = typename base_type::storage_type;

// CAS loop for unary ops (neg/not). "original" starts as a (possibly stale) copy of
// storage and is bound to eax ("+a") because cmpxchg implicitly compares against and,
// on failure, reloads eax with the current value. "result" receives op(original);
// "=&r" (early-clobber) keeps it distinct from the input operands.
#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, original, result)\
    __asm__ __volatile__\
    (\
        ".align 16\n\t"\
        "1: mov %[orig], %[res]\n\t"\
        op " %[res]\n\t"\
        "lock; cmpxchgl %[res], %[storage]\n\t"\
        "jne 1b"\
        : [orig] "+a" (original), [storage] "+m" (storage), [res] "=&r" (result)\
        : \
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
    )

    //! Atomically negates \a storage; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type original = storage;  // initial guess; refined by the CAS loop
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("negl", original, result);
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return original;
    }

    //! Atomically complements \a storage; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("notl", original, result);
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return original;
    }

    //! Atomically negates \a storage; returns the new value.
    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("negl", original, result);
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return result;
    }

    //! Atomically complements \a storage; returns the new value.
    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("notl", original, result);
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return result;
    }

#undef BOOST_ATOMIC_DETAIL_CAS_LOOP

// CAS loop for binary ops (and/or/xor). Each iteration recomputes
// result = argument op original, where "original" lives in eax (cmpxchg's implicit
// operand) — the op reads %%eax directly, so [orig] is only referenced via that
// register. Used for commutative operations only.
#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, argument, original, result)\
    __asm__ __volatile__\
    (\
        ".align 16\n\t"\
        "1: mov %[arg], %[res]\n\t"\
        op " %%eax, %[res]\n\t"\
        "lock; cmpxchgl %[res], %[storage]\n\t"\
        "jne 1b"\
        : [orig] "+a" (original), [storage] "+m" (storage), [res] "=&r" (result)\
        : [arg] "ir" (argument)\
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
    )

    //! Atomically ANDs \a v into \a storage; returns the new value.
    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("andl", v, original, result);
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return static_cast< storage_type >(result);
    }

    //! Atomically ORs \a v into \a storage; returns the new value.
    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("orl", v, original, result);
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return static_cast< storage_type >(result);
    }

    //! Atomically XORs \a v into \a storage; returns the new value.
    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("xorl", v, original, result);
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return static_cast< storage_type >(result);
    }

#undef BOOST_ATOMIC_DETAIL_CAS_LOOP

    //! Atomically negates \a storage; returns whether the new value is non-zero.
    static BOOST_FORCEINLINE bool negate_and_test(storage_type volatile& storage, memory_order order) noexcept
    {
        return !!negate(storage, order);
    }

    //! Atomically complements \a storage; returns whether the new value is non-zero.
    static BOOST_FORCEINLINE bool complement_and_test(storage_type volatile& storage, memory_order order) noexcept
    {
        return !!bitwise_complement(storage, order);
    }

    //! Atomically adds \a v to \a storage; the result is discarded.
    static BOOST_FORCEINLINE void opaque_add(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            // Compile-time constant 1: shorter "inc" encoding.
            __asm__ __volatile__
            (
                "lock; incl %[storage]\n\t"
                : [storage] "+m" (storage)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addl %[argument], %[storage]\n\t"
                : [storage] "+m" (storage)
                : [argument] "ir" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    }

    //! Atomically subtracts \a v from \a storage; the result is discarded.
    static BOOST_FORCEINLINE void opaque_sub(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decl %[storage]\n\t"
                : [storage] "+m" (storage)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subl %[argument], %[storage]\n\t"
                : [storage] "+m" (storage)
                : [argument] "ir" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    }

    //! Atomically negates \a storage; the result is discarded.
    static BOOST_FORCEINLINE void opaque_negate(storage_type volatile& storage, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        __asm__ __volatile__
        (
            "lock; negl %[storage]\n\t"
            : [storage] "+m" (storage)
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    }

    //! Atomically ANDs \a v into \a storage; the result is discarded.
    static BOOST_FORCEINLINE void opaque_and(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        __asm__ __volatile__
        (
            "lock; andl %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "ir" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    }

    //! Atomically ORs \a v into \a storage; the result is discarded.
    static BOOST_FORCEINLINE void opaque_or(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        __asm__ __volatile__
        (
            "lock; orl %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "ir" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    }

    //! Atomically XORs \a v into \a storage; the result is discarded.
    static BOOST_FORCEINLINE void opaque_xor(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        __asm__ __volatile__
        (
            "lock; xorl %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "ir" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    }

    //! Atomically complements \a storage; the result is discarded.
    //! Note: "not" does not modify flags, hence no condition-code clobber here.
    static BOOST_FORCEINLINE void opaque_complement(storage_type volatile& storage, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        __asm__ __volatile__
        (
            "lock; notl %[storage]\n\t"
            : [storage] "+m" (storage)
            :
            : "memory"
        );
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    }

    //! Atomically adds \a v to \a storage; returns whether the resulting value is non-zero.
    static BOOST_FORCEINLINE bool add_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        // Flag output operands available: ZF is read directly from the locked instruction.
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incl %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                :
                : "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addl %[argument], %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                : [argument] "ir" (v)
                : "memory"
            );
        }
#else
        // Fallback: capture ZF with setnz.
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incl %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addl %[argument], %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                : [argument] "ir" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
#endif
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return res;
    }

    //! Atomically subtracts \a v from \a storage; returns whether the resulting value is non-zero.
    static BOOST_FORCEINLINE bool sub_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decl %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                :
                : "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subl %[argument], %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                : [argument] "ir" (v)
                : "memory"
            );
        }
#else
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decl %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subl %[argument], %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                : [argument] "ir" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
#endif
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return res;
    }

    //! Atomically ANDs \a v into \a storage; returns whether the resulting value is non-zero.
    static BOOST_FORCEINLINE bool and_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; andl %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "ir" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; andl %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "ir" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return res;
    }

    //! Atomically ORs \a v into \a storage; returns whether the resulting value is non-zero.
    static BOOST_FORCEINLINE bool or_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; orl %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "ir" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; orl %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "ir" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return res;
    }

    //! Atomically XORs \a v into \a storage; returns whether the resulting value is non-zero.
    static BOOST_FORCEINLINE bool xor_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; xorl %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "ir" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; xorl %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "ir" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return res;
    }

    //! Atomically sets bit \a bit_number; returns the bit's previous value (from CF).
    static BOOST_FORCEINLINE bool bit_test_and_set(storage_type volatile& storage, unsigned int bit_number, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; btsl %[bit_number], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccc" (res)
            : [bit_number] "Kr" ((std::uint32_t)bit_number)  // "K": imm8; "r": any general register
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; btsl %[bit_number], %[storage]\n\t"
            "setc %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [bit_number] "Kr" ((std::uint32_t)bit_number)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return res;
    }

    //! Atomically clears bit \a bit_number; returns the bit's previous value (from CF).
    static BOOST_FORCEINLINE bool bit_test_and_reset(storage_type volatile& storage, unsigned int bit_number, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; btrl %[bit_number], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccc" (res)
            : [bit_number] "Kr" ((std::uint32_t)bit_number)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; btrl %[bit_number], %[storage]\n\t"
            "setc %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [bit_number] "Kr" ((std::uint32_t)bit_number)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return res;
    }

    //! Atomically toggles bit \a bit_number; returns the bit's previous value (from CF).
    static BOOST_FORCEINLINE bool bit_test_and_complement(storage_type volatile& storage, unsigned int bit_number, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; btcl %[bit_number], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccc" (res)
            : [bit_number] "Kr" ((std::uint32_t)bit_number)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; btcl %[bit_number], %[storage]\n\t"
            "setc %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [bit_number] "Kr" ((std::uint32_t)bit_number)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return res;
    }
};
  1360. #if defined(__x86_64__)
  1361. template< typename Base, bool Signed >
  1362. struct extra_operations< Base, 8u, Signed, true > :
  1363. public extra_operations_generic< Base, 8u, Signed >
  1364. {
  1365. using base_type = extra_operations_generic< Base, 8u, Signed >;
  1366. using storage_type = typename base_type::storage_type;
// CAS loop for unary ops (neg/not) on 8-byte storage. "original" is bound to rax
// ("+a") because cmpxchg implicitly compares against and, on failure, reloads rax
// with the current value. "result" receives op(original); "=&r" (early-clobber)
// keeps it distinct from the inputs.
#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, original, result)\
    __asm__ __volatile__\
    (\
        ".align 16\n\t"\
        "1: mov %[orig], %[res]\n\t"\
        op " %[res]\n\t"\
        "lock; cmpxchgq %[res], %[storage]\n\t"\
        "jne 1b"\
        : [orig] "+a" (original), [storage] "+m" (storage), [res] "=&r" (result)\
        : \
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
    )
//! Atomically negates the 8-byte \a storage; returns the previous value.
static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) noexcept
{
    BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
    storage_type original = storage;  // initial guess; refined by the CAS loop
    storage_type result;
    BOOST_ATOMIC_DETAIL_CAS_LOOP("negq", original, result);
    BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    return original;
}
//! Atomically complements the 8-byte \a storage; returns the previous value.
static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) noexcept
{
    BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
    storage_type original = storage;
    storage_type result;
    BOOST_ATOMIC_DETAIL_CAS_LOOP("notq", original, result);
    BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    return original;
}
//! Atomically negates the 8-byte \a storage; returns the new value.
static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) noexcept
{
    BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
    storage_type original = storage;
    storage_type result;
    BOOST_ATOMIC_DETAIL_CAS_LOOP("negq", original, result);
    BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    return result;
}
//! Atomically complements the 8-byte \a storage; returns the new value.
static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) noexcept
{
    BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
    storage_type original = storage;
    storage_type result;
    BOOST_ATOMIC_DETAIL_CAS_LOOP("notq", original, result);
    BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    return result;
}
#undef BOOST_ATOMIC_DETAIL_CAS_LOOP

// CAS loop for binary ops (and/or/xor) on 8-byte storage. Each iteration recomputes
// result = argument op original, where "original" lives in rax (cmpxchg's implicit
// operand) — the op reads %%rax directly. "r" (not "ir") for the argument because
// 64-bit immediates are not generally encodable. Commutative operations only.
#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, argument, original, result)\
    __asm__ __volatile__\
    (\
        ".align 16\n\t"\
        "1: mov %[arg], %[res]\n\t"\
        op " %%rax, %[res]\n\t"\
        "lock; cmpxchgq %[res], %[storage]\n\t"\
        "jne 1b"\
        : [orig] "+a" (original), [storage] "+m" (storage), [res] "=&r" (result)\
        : [arg] "r" (argument)\
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
    )
//! Atomically ANDs \a v into the 8-byte \a storage; returns the new value.
static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) noexcept
{
    BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
    storage_type original = storage;
    storage_type result;
    BOOST_ATOMIC_DETAIL_CAS_LOOP("andq", v, original, result);
    BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    return static_cast< storage_type >(result);
}
//! Atomically ORs \a v into the 8-byte \a storage; returns the new value.
static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) noexcept
{
    BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
    storage_type original = storage;
    storage_type result;
    BOOST_ATOMIC_DETAIL_CAS_LOOP("orq", v, original, result);
    BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    return static_cast< storage_type >(result);
}
//! Atomically XORs \a v into the 8-byte \a storage; returns the new value.
static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) noexcept
{
    BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
    storage_type original = storage;
    storage_type result;
    BOOST_ATOMIC_DETAIL_CAS_LOOP("xorq", v, original, result);
    BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    return static_cast< storage_type >(result);
}
#undef BOOST_ATOMIC_DETAIL_CAS_LOOP
  1456. static BOOST_FORCEINLINE bool negate_and_test(storage_type volatile& storage, memory_order order) noexcept
  1457. {
  1458. return !!negate(storage, order);
  1459. }
  1460. static BOOST_FORCEINLINE bool complement_and_test(storage_type volatile& storage, memory_order order) noexcept
  1461. {
  1462. return !!bitwise_complement(storage, order);
  1463. }
//! Atomically adds \a v to the 8-byte \a storage; the result is discarded.
static BOOST_FORCEINLINE void opaque_add(storage_type volatile& storage, storage_type v, memory_order order) noexcept
{
    BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
    if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
    {
        // Compile-time constant 1: shorter "inc" encoding.
        __asm__ __volatile__
        (
            "lock; incq %[storage]\n\t"
            : [storage] "+m" (storage)
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }
    else
    {
        // "e": 32-bit sign-extended immediate, or any register ("r").
        __asm__ __volatile__
        (
            "lock; addq %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "er" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }
    BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
}
//! Atomically subtracts \a v from the 8-byte \a storage; the result is discarded.
static BOOST_FORCEINLINE void opaque_sub(storage_type volatile& storage, storage_type v, memory_order order) noexcept
{
    BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
    if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
    {
        __asm__ __volatile__
        (
            "lock; decq %[storage]\n\t"
            : [storage] "+m" (storage)
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }
    else
    {
        __asm__ __volatile__
        (
            "lock; subq %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "er" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }
    BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
}
//! Atomically negates the 8-byte \a storage; the result is discarded.
static BOOST_FORCEINLINE void opaque_negate(storage_type volatile& storage, memory_order order) noexcept
{
    BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
    __asm__ __volatile__
    (
        "lock; negq %[storage]\n\t"
        : [storage] "+m" (storage)
        :
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
    );
    BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
}
//! Atomically ANDs \a v into the 8-byte \a storage; the result is discarded.
static BOOST_FORCEINLINE void opaque_and(storage_type volatile& storage, storage_type v, memory_order order) noexcept
{
    BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
    __asm__ __volatile__
    (
        "lock; andq %[argument], %[storage]\n\t"
        : [storage] "+m" (storage)
        : [argument] "er" (v)
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
    );
    BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
}
//! Atomically ORs \a v into the 8-byte \a storage; the result is discarded.
static BOOST_FORCEINLINE void opaque_or(storage_type volatile& storage, storage_type v, memory_order order) noexcept
{
    BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
    __asm__ __volatile__
    (
        "lock; orq %[argument], %[storage]\n\t"
        : [storage] "+m" (storage)
        : [argument] "er" (v)
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
    );
    BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
}
//! Atomically XORs \a v into the 8-byte \a storage; the result is discarded.
static BOOST_FORCEINLINE void opaque_xor(storage_type volatile& storage, storage_type v, memory_order order) noexcept
{
    BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
    __asm__ __volatile__
    (
        "lock; xorq %[argument], %[storage]\n\t"
        : [storage] "+m" (storage)
        : [argument] "er" (v)
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
    );
    BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
}
//! Atomically complements the 8-byte \a storage; the result is discarded.
//! Note: "not" does not modify flags, hence no condition-code clobber here.
static BOOST_FORCEINLINE void opaque_complement(storage_type volatile& storage, memory_order order) noexcept
{
    BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
    __asm__ __volatile__
    (
        "lock; notq %[storage]\n\t"
        : [storage] "+m" (storage)
        :
        : "memory"
    );
    BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
}
// Atomically performs storage += v and returns true if the resulting value is non-zero
// (i.e. the ZF flag produced by the locked add is clear).
//
// Two code paths:
//  - With BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS the compiler supports GCC flag
//    output operands ("=@ccnz"), so the ZF test is extracted directly from EFLAGS
//    and no explicit CC clobber is needed (flag outputs imply it).
//  - Otherwise a "setnz" instruction materializes the flag into a byte register
//    ("=q" restricts to byte-addressable registers), and CC must be clobbered.
// In both paths, a compile-time-constant v == 1 is special-cased to "incq", which
// has a shorter encoding than "addq $1".
static BOOST_FORCEINLINE bool add_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept
{
    BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
    bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
    if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
    {
        __asm__ __volatile__
        (
            "lock; incq %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res) // res = (ZF == 0)
            :
            : "memory"
        );
    }
    else
    {
        __asm__ __volatile__
        (
            "lock; addq %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res) // res = (ZF == 0)
            : [argument] "er" (v)
            : "memory"
        );
    }
#else
    if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
    {
        __asm__ __volatile__
        (
            "lock; incq %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }
    else
    {
        __asm__ __volatile__
        (
            "lock; addq %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "er" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }
#endif
    BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    return res;
}
// Atomically performs storage -= v and returns true if the resulting value is non-zero
// (ZF from the locked subtract is clear). Mirrors add_and_test: a flag-output path
// ("=@ccnz") when the compiler supports it, otherwise a "setnz" + CC-clobber path,
// with a compile-time v == 1 special-cased to the shorter "decq" encoding.
static BOOST_FORCEINLINE bool sub_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept
{
    BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
    bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
    if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
    {
        __asm__ __volatile__
        (
            "lock; decq %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res) // res = (ZF == 0)
            :
            : "memory"
        );
    }
    else
    {
        __asm__ __volatile__
        (
            "lock; subq %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res) // res = (ZF == 0)
            : [argument] "er" (v)
            : "memory"
        );
    }
#else
    if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
    {
        __asm__ __volatile__
        (
            "lock; decq %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }
    else
    {
        __asm__ __volatile__
        (
            "lock; subq %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "er" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }
#endif
    BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    return res;
}
// Atomically performs storage &= v and returns true if the resulting value is non-zero
// (ZF from the locked AND is clear). Uses a flag output operand ("=@ccnz") when
// available, otherwise "setnz" into a byte register with an explicit CC clobber.
static BOOST_FORCEINLINE bool and_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept
{
    BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
    bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
    __asm__ __volatile__
    (
        "lock; andq %[argument], %[storage]\n\t"
        : [storage] "+m" (storage), [result] "=@ccnz" (res) // res = (ZF == 0)
        : [argument] "er" (v)
        : "memory"
    );
#else
    __asm__ __volatile__
    (
        "lock; andq %[argument], %[storage]\n\t"
        "setnz %[result]\n\t"
        : [storage] "+m" (storage), [result] "=q" (res)
        : [argument] "er" (v)
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
    );
#endif
    BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    return res;
}
// Atomically performs storage |= v and returns true if the resulting value is non-zero
// (ZF from the locked OR is clear). Flag-output path ("=@ccnz") when supported,
// otherwise "setnz" with an explicit CC clobber.
static BOOST_FORCEINLINE bool or_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept
{
    BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
    bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
    __asm__ __volatile__
    (
        "lock; orq %[argument], %[storage]\n\t"
        : [storage] "+m" (storage), [result] "=@ccnz" (res) // res = (ZF == 0)
        : [argument] "er" (v)
        : "memory"
    );
#else
    __asm__ __volatile__
    (
        "lock; orq %[argument], %[storage]\n\t"
        "setnz %[result]\n\t"
        : [storage] "+m" (storage), [result] "=q" (res)
        : [argument] "er" (v)
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
    );
#endif
    BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    return res;
}
// Atomically performs storage ^= v and returns true if the resulting value is non-zero
// (ZF from the locked XOR is clear). Flag-output path ("=@ccnz") when supported,
// otherwise "setnz" with an explicit CC clobber.
static BOOST_FORCEINLINE bool xor_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept
{
    BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
    bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
    __asm__ __volatile__
    (
        "lock; xorq %[argument], %[storage]\n\t"
        : [storage] "+m" (storage), [result] "=@ccnz" (res) // res = (ZF == 0)
        : [argument] "er" (v)
        : "memory"
    );
#else
    __asm__ __volatile__
    (
        "lock; xorq %[argument], %[storage]\n\t"
        "setnz %[result]\n\t"
        : [storage] "+m" (storage), [result] "=q" (res)
        : [argument] "er" (v)
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
    );
#endif
    BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    return res;
}
// Atomically sets bit `bit_number` in storage and returns the PREVIOUS value of that
// bit. BTS copies the original bit into CF, so the result is read via "=@ccc" (CF set)
// when flag outputs are available, or via "setc" otherwise. The bit number is widened
// to std::uint64_t; constraint "Kr" allows a register or a small immediate operand.
static BOOST_FORCEINLINE bool bit_test_and_set(storage_type volatile& storage, unsigned int bit_number, memory_order order) noexcept
{
    BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
    bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
    __asm__ __volatile__
    (
        "lock; btsq %[bit_number], %[storage]\n\t"
        : [storage] "+m" (storage), [result] "=@ccc" (res) // res = CF = old bit value
        : [bit_number] "Kr" ((std::uint64_t)bit_number)
        : "memory"
    );
#else
    __asm__ __volatile__
    (
        "lock; btsq %[bit_number], %[storage]\n\t"
        "setc %[result]\n\t"
        : [storage] "+m" (storage), [result] "=q" (res)
        : [bit_number] "Kr" ((std::uint64_t)bit_number)
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
    );
#endif
    BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    return res;
}
// Atomically clears bit `bit_number` in storage and returns the PREVIOUS value of that
// bit. BTR copies the original bit into CF before clearing it; the flag is read via
// "=@ccc" when flag outputs are available, or via "setc" otherwise.
static BOOST_FORCEINLINE bool bit_test_and_reset(storage_type volatile& storage, unsigned int bit_number, memory_order order) noexcept
{
    BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
    bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
    __asm__ __volatile__
    (
        "lock; btrq %[bit_number], %[storage]\n\t"
        : [storage] "+m" (storage), [result] "=@ccc" (res) // res = CF = old bit value
        : [bit_number] "Kr" ((std::uint64_t)bit_number)
        : "memory"
    );
#else
    __asm__ __volatile__
    (
        "lock; btrq %[bit_number], %[storage]\n\t"
        "setc %[result]\n\t"
        : [storage] "+m" (storage), [result] "=q" (res)
        : [bit_number] "Kr" ((std::uint64_t)bit_number)
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
    );
#endif
    BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    return res;
}
// Atomically toggles bit `bit_number` in storage and returns the PREVIOUS value of that
// bit. BTC copies the original bit into CF before complementing it; the flag is read
// via "=@ccc" when flag outputs are available, or via "setc" otherwise.
static BOOST_FORCEINLINE bool bit_test_and_complement(storage_type volatile& storage, unsigned int bit_number, memory_order order) noexcept
{
    BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
    bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
    __asm__ __volatile__
    (
        "lock; btcq %[bit_number], %[storage]\n\t"
        : [storage] "+m" (storage), [result] "=@ccc" (res) // res = CF = old bit value
        : [bit_number] "Kr" ((std::uint64_t)bit_number)
        : "memory"
    );
#else
    __asm__ __volatile__
    (
        "lock; btcq %[bit_number], %[storage]\n\t"
        "setc %[result]\n\t"
        : [storage] "+m" (storage), [result] "=q" (res)
        : [bit_number] "Kr" ((std::uint64_t)bit_number)
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
    );
#endif
    BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
    return res;
}
  1828. };
  1829. #endif // defined(__x86_64__)
  1830. } // namespace detail
  1831. } // namespace atomics
  1832. } // namespace boost
  1833. #include <boost/atomic/detail/footer.hpp>
  1834. #endif // BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_X86_HPP_INCLUDED_