extra_ops_gcc_x86.hpp

/*
 * Distributed under the Boost Software License, Version 1.0.
 * (See accompanying file LICENSE_1_0.txt or copy at
 * http://www.boost.org/LICENSE_1_0.txt)
 *
 * Copyright (c) 2015 Andrey Semashev
 */
/*!
 * \file atomic/detail/extra_ops_gcc_x86.hpp
 *
 * This header contains the implementation of the extra atomic operations for x86.
 */
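
/*
 * The "extra" operations extend the core fetch-and-modify set with negation,
 * bitwise complement, and/or/xor variants that return the new value, "opaque"
 * variants that discard the result, and *_and_test/bit_test_* predicates.
 * On x86 they are implemented in three ways: a single lock-prefixed
 * instruction when no result is needed (lock add, lock neg, lock not, ...),
 * the bts/btr/btc instructions when only the original bit value is needed,
 * and a cmpxchg retry loop when the new value itself must be produced.
 */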
#ifndef BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_X86_HPP_INCLUDED_
#define BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_X86_HPP_INCLUDED_

#include <cstddef>
#include <boost/memory_order.hpp>
#include <boost/atomic/detail/config.hpp>
#include <boost/atomic/detail/storage_type.hpp>
#include <boost/atomic/detail/extra_operations_fwd.hpp>
#include <boost/atomic/capabilities.hpp>

#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif

namespace boost {
namespace atomics {
namespace detail {

template< typename Base >
struct gcc_x86_extra_operations_common :
    public Base
{
    typedef Base base_type;
    typedef typename base_type::storage_type storage_type;

    static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(Base::fetch_add(storage, v, order) + v);
    }

    static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        return static_cast< storage_type >(Base::fetch_sub(storage, v, order) - v);
    }

    static BOOST_FORCEINLINE bool bit_test_and_set(storage_type volatile& storage, unsigned int bit_number, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; bts %[bit_number], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccc" (res)
            : [bit_number] "Kq" (bit_number)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; bts %[bit_number], %[storage]\n\t"
            "setc %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [bit_number] "Kq" (bit_number)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }

    static BOOST_FORCEINLINE bool bit_test_and_reset(storage_type volatile& storage, unsigned int bit_number, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; btr %[bit_number], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccc" (res)
            : [bit_number] "Kq" (bit_number)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; btr %[bit_number], %[storage]\n\t"
            "setc %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [bit_number] "Kq" (bit_number)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }

    static BOOST_FORCEINLINE bool bit_test_and_complement(storage_type volatile& storage, unsigned int bit_number, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; btc %[bit_number], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccc" (res)
            : [bit_number] "Kq" (bit_number)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; btc %[bit_number], %[storage]\n\t"
            "setc %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [bit_number] "Kq" (bit_number)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }
};
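
/*
 * A note on the paired assembly blocks above (the same pattern recurs
 * throughout this header): when BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS is
 * defined the compiler supports flag output operands, so the result is taken
 * directly from EFLAGS ("=@ccc" extracts the carry flag, "=@ccnz" is true
 * when the zero flag is clear) and no explicit condition code clobber is
 * listed. Otherwise an explicit setc/setnz instruction materializes the flag
 * into a byte register and the condition codes are declared clobbered. The
 * "Kq" constraint passes bit_number either as an 8-bit immediate or in a
 * byte-addressable register.
 */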
template< typename Base, bool Signed >
struct extra_operations< Base, 1u, Signed, true > :
    public gcc_x86_extra_operations_common< Base >
{
    typedef gcc_x86_extra_operations_common< Base > base_type;
    typedef typename base_type::storage_type storage_type;
    typedef typename make_storage_type< 4u >::type temp_storage_type;

#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, original, result)\
    __asm__ __volatile__\
    (\
        ".align 16\n\t"\
        "1: movzbl %[orig], %2\n\t"\
        op " %b2\n\t"\
        "lock; cmpxchgb %b2, %[storage]\n\t"\
        "jne 1b"\
        : [orig] "+a" (original), [storage] "+m" (storage), "=&q" (result)\
        : \
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
    )
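
/*
 * The macro above expands to a compare-and-swap loop. The last observed value
 * travels in al (the "+a" constraint), movzbl zero-extends it into a 32-bit
 * scratch register (hence the 4-byte temp_storage_type), the substituted
 * opcode transforms the scratch copy, and cmpxchgb publishes it; on failure
 * cmpxchg reloads al with the current value and the loop retries. A rough
 * C++ sketch of the control flow, with a hypothetical compare_exchange()
 * standing in for the cmpxchg instruction (which also refreshes `original`
 * on failure):
 *
 *     storage_type original = storage;
 *     temp_storage_type result;
 *     do
 *     {
 *         result = op(original);
 *     }
 *     while (!compare_exchange(storage, original, result));
 *     // fetch_negate/fetch_complement return original;
 *     // negate and bitwise_complement return result
 */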
    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("negb", original, result);
        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("notb", original, result);
        return original;
    }

    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("negb", original, result);
        return static_cast< storage_type >(result);
    }

    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("notb", original, result);
        return static_cast< storage_type >(result);
    }

#undef BOOST_ATOMIC_DETAIL_CAS_LOOP

#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, argument, original, result)\
    __asm__ __volatile__\
    (\
        ".align 16\n\t"\
        "1: mov %[arg], %2\n\t"\
        op " %%al, %b2\n\t"\
        "lock; cmpxchgb %b2, %[storage]\n\t"\
        "jne 1b"\
        : [orig] "+a" (original), [storage] "+m" (storage), "=&q" (result)\
        : [arg] "ir" ((temp_storage_type)argument)\
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
    )
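
/*
 * The binary variant of the same retry loop: the argument is copied into the
 * scratch register first, and the last observed value in al is then folded
 * into it by the substituted opcode, e.g. "andb %al, %b2".
 */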
    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("andb", v, original, result);
        return static_cast< storage_type >(result);
    }

    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("orb", v, original, result);
        return static_cast< storage_type >(result);
    }

    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("xorb", v, original, result);
        return static_cast< storage_type >(result);
    }

#undef BOOST_ATOMIC_DETAIL_CAS_LOOP

    static BOOST_FORCEINLINE bool negate_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!negate(storage, order);
    }

    static BOOST_FORCEINLINE bool complement_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!bitwise_complement(storage, order);
    }

    static BOOST_FORCEINLINE void opaque_add(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incb %[storage]\n\t"
                : [storage] "+m" (storage)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addb %[argument], %[storage]\n\t"
                : [storage] "+m" (storage)
                : [argument] "iq" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
    }

    static BOOST_FORCEINLINE void opaque_sub(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decb %[storage]\n\t"
                : [storage] "+m" (storage)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subb %[argument], %[storage]\n\t"
                : [storage] "+m" (storage)
                : [argument] "iq" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
    }

    static BOOST_FORCEINLINE void opaque_negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; negb %[storage]\n\t"
            : [storage] "+m" (storage)
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    static BOOST_FORCEINLINE void opaque_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; andb %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    static BOOST_FORCEINLINE void opaque_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; orb %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    static BOOST_FORCEINLINE void opaque_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; xorb %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    static BOOST_FORCEINLINE void opaque_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; notb %[storage]\n\t"
            : [storage] "+m" (storage)
            :
            : "memory"
        );
    }

    static BOOST_FORCEINLINE bool add_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incb %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                :
                : "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addb %[argument], %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                : [argument] "iq" (v)
                : "memory"
            );
        }
#else
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incb %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addb %[argument], %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                : [argument] "iq" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
#endif
        return res;
    }

    static BOOST_FORCEINLINE bool sub_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decb %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                :
                : "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subb %[argument], %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                : [argument] "iq" (v)
                : "memory"
            );
        }
#else
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decb %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subb %[argument], %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                : [argument] "iq" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
#endif
        return res;
    }

    static BOOST_FORCEINLINE bool and_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; andb %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "iq" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; andb %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }

    static BOOST_FORCEINLINE bool or_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; orb %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "iq" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; orb %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }

    static BOOST_FORCEINLINE bool xor_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; xorb %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "iq" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; xorb %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }
};
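
/*
 * The 2- and 4-byte specializations below repeat the 1-byte pattern with the
 * w and l instruction suffixes. Apart from that, only the operand details
 * change: byte and word arguments use the "iq" constraint, dword arguments
 * use "ir", and the 4-byte version needs no widening temp_storage_type
 * because its values already fill a full register.
 */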
template< typename Base, bool Signed >
struct extra_operations< Base, 2u, Signed, true > :
    public gcc_x86_extra_operations_common< Base >
{
    typedef gcc_x86_extra_operations_common< Base > base_type;
    typedef typename base_type::storage_type storage_type;
    typedef typename make_storage_type< 4u >::type temp_storage_type;

#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, original, result)\
    __asm__ __volatile__\
    (\
        ".align 16\n\t"\
        "1: movzwl %[orig], %2\n\t"\
        op " %w2\n\t"\
        "lock; cmpxchgw %w2, %[storage]\n\t"\
        "jne 1b"\
        : [orig] "+a" (original), [storage] "+m" (storage), "=&q" (result)\
        : \
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
    )

    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("negw", original, result);
        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("notw", original, result);
        return original;
    }

    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("negw", original, result);
        return static_cast< storage_type >(result);
    }

    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("notw", original, result);
        return static_cast< storage_type >(result);
    }

#undef BOOST_ATOMIC_DETAIL_CAS_LOOP

#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, argument, original, result)\
    __asm__ __volatile__\
    (\
        ".align 16\n\t"\
        "1: mov %[arg], %2\n\t"\
        op " %%ax, %w2\n\t"\
        "lock; cmpxchgw %w2, %[storage]\n\t"\
        "jne 1b"\
        : [orig] "+a" (original), [storage] "+m" (storage), "=&q" (result)\
        : [arg] "ir" ((temp_storage_type)argument)\
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
    )

    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("andw", v, original, result);
        return static_cast< storage_type >(result);
    }

    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("orw", v, original, result);
        return static_cast< storage_type >(result);
    }

    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        temp_storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("xorw", v, original, result);
        return static_cast< storage_type >(result);
    }

#undef BOOST_ATOMIC_DETAIL_CAS_LOOP

    static BOOST_FORCEINLINE bool negate_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!negate(storage, order);
    }

    static BOOST_FORCEINLINE bool complement_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!bitwise_complement(storage, order);
    }

    static BOOST_FORCEINLINE void opaque_add(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incw %[storage]\n\t"
                : [storage] "+m" (storage)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addw %[argument], %[storage]\n\t"
                : [storage] "+m" (storage)
                : [argument] "iq" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
    }

    static BOOST_FORCEINLINE void opaque_sub(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decw %[storage]\n\t"
                : [storage] "+m" (storage)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subw %[argument], %[storage]\n\t"
                : [storage] "+m" (storage)
                : [argument] "iq" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
    }

    static BOOST_FORCEINLINE void opaque_negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; negw %[storage]\n\t"
            : [storage] "+m" (storage)
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    static BOOST_FORCEINLINE void opaque_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; andw %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    static BOOST_FORCEINLINE void opaque_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; orw %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    static BOOST_FORCEINLINE void opaque_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; xorw %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    static BOOST_FORCEINLINE void opaque_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; notw %[storage]\n\t"
            : [storage] "+m" (storage)
            :
            : "memory"
        );
    }

    static BOOST_FORCEINLINE bool add_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incw %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                :
                : "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addw %[argument], %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                : [argument] "iq" (v)
                : "memory"
            );
        }
#else
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incw %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addw %[argument], %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                : [argument] "iq" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
#endif
        return res;
    }

    static BOOST_FORCEINLINE bool sub_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decw %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                :
                : "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subw %[argument], %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                : [argument] "iq" (v)
                : "memory"
            );
        }
#else
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decw %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subw %[argument], %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                : [argument] "iq" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
#endif
        return res;
    }

    static BOOST_FORCEINLINE bool and_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; andw %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "iq" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; andw %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }

    static BOOST_FORCEINLINE bool or_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; orw %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "iq" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; orw %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }

    static BOOST_FORCEINLINE bool xor_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; xorw %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "iq" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; xorw %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "iq" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }
};

template< typename Base, bool Signed >
struct extra_operations< Base, 4u, Signed, true > :
    public gcc_x86_extra_operations_common< Base >
{
    typedef gcc_x86_extra_operations_common< Base > base_type;
    typedef typename base_type::storage_type storage_type;

#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, original, result)\
    __asm__ __volatile__\
    (\
        ".align 16\n\t"\
        "1: mov %[orig], %[res]\n\t"\
        op " %[res]\n\t"\
        "lock; cmpxchgl %[res], %[storage]\n\t"\
        "jne 1b"\
        : [orig] "+a" (original), [storage] "+m" (storage), [res] "=&r" (result)\
        : \
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
    )

    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("negl", original, result);
        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("notl", original, result);
        return original;
    }

    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("negl", original, result);
        return result;
    }

    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("notl", original, result);
        return result;
    }

#undef BOOST_ATOMIC_DETAIL_CAS_LOOP

#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, argument, original, result)\
    __asm__ __volatile__\
    (\
        ".align 16\n\t"\
        "1: mov %[arg], %[res]\n\t"\
        op " %%eax, %[res]\n\t"\
        "lock; cmpxchgl %[res], %[storage]\n\t"\
        "jne 1b"\
        : [orig] "+a" (original), [storage] "+m" (storage), [res] "=&r" (result)\
        : [arg] "ir" (argument)\
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
    )

    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("andl", v, original, result);
        return static_cast< storage_type >(result);
    }

    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("orl", v, original, result);
        return static_cast< storage_type >(result);
    }

    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("xorl", v, original, result);
        return static_cast< storage_type >(result);
    }

#undef BOOST_ATOMIC_DETAIL_CAS_LOOP

    static BOOST_FORCEINLINE bool negate_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!negate(storage, order);
    }

    static BOOST_FORCEINLINE bool complement_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!bitwise_complement(storage, order);
    }

    static BOOST_FORCEINLINE void opaque_add(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incl %[storage]\n\t"
                : [storage] "+m" (storage)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addl %[argument], %[storage]\n\t"
                : [storage] "+m" (storage)
                : [argument] "ir" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
    }

    static BOOST_FORCEINLINE void opaque_sub(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decl %[storage]\n\t"
                : [storage] "+m" (storage)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subl %[argument], %[storage]\n\t"
                : [storage] "+m" (storage)
                : [argument] "ir" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
    }

    static BOOST_FORCEINLINE void opaque_negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; negl %[storage]\n\t"
            : [storage] "+m" (storage)
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    static BOOST_FORCEINLINE void opaque_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; andl %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "ir" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    static BOOST_FORCEINLINE void opaque_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; orl %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "ir" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    static BOOST_FORCEINLINE void opaque_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; xorl %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "ir" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    static BOOST_FORCEINLINE void opaque_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; notl %[storage]\n\t"
            : [storage] "+m" (storage)
            :
            : "memory"
        );
    }

    static BOOST_FORCEINLINE bool add_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incl %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                :
                : "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addl %[argument], %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                : [argument] "ir" (v)
                : "memory"
            );
        }
#else
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incl %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addl %[argument], %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                : [argument] "ir" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
#endif
        return res;
    }

    static BOOST_FORCEINLINE bool sub_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decl %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                :
                : "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subl %[argument], %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                : [argument] "ir" (v)
                : "memory"
            );
        }
#else
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decl %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subl %[argument], %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                : [argument] "ir" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
#endif
        return res;
    }

    static BOOST_FORCEINLINE bool and_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; andl %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "ir" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; andl %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "ir" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }

    static BOOST_FORCEINLINE bool or_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; orl %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "ir" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; orl %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "ir" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }

    static BOOST_FORCEINLINE bool xor_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; xorl %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "ir" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; xorl %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "ir" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }
};
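
/*
 * The 8-byte specialization is only provided on x86-64, where lock-prefixed
 * 64-bit instructions and cmpxchgq are available. Its single-instruction
 * operations use the "er" constraint, which limits immediates to 32-bit
 * sign-extended constants -- all that 64-bit ALU instructions can encode.
 */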
#if defined(__x86_64__)

template< typename Base, bool Signed >
struct extra_operations< Base, 8u, Signed, true > :
    public gcc_x86_extra_operations_common< Base >
{
    typedef gcc_x86_extra_operations_common< Base > base_type;
    typedef typename base_type::storage_type storage_type;

#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, original, result)\
    __asm__ __volatile__\
    (\
        ".align 16\n\t"\
        "1: mov %[orig], %[res]\n\t"\
        op " %[res]\n\t"\
        "lock; cmpxchgq %[res], %[storage]\n\t"\
        "jne 1b"\
        : [orig] "+a" (original), [storage] "+m" (storage), [res] "=&r" (result)\
        : \
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
    )

    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("negq", original, result);
        return original;
    }

    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("notq", original, result);
        return original;
    }

    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("negq", original, result);
        return result;
    }

    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("notq", original, result);
        return result;
    }

#undef BOOST_ATOMIC_DETAIL_CAS_LOOP

#define BOOST_ATOMIC_DETAIL_CAS_LOOP(op, argument, original, result)\
    __asm__ __volatile__\
    (\
        ".align 16\n\t"\
        "1: mov %[arg], %[res]\n\t"\
        op " %%rax, %[res]\n\t"\
        "lock; cmpxchgq %[res], %[storage]\n\t"\
        "jne 1b"\
        : [orig] "+a" (original), [storage] "+m" (storage), [res] "=&r" (result)\
        : [arg] "r" (argument)\
        : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
    )
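
/*
 * Note that, unlike the narrower variants, the binary CAS loop above accepts
 * the argument only in a register ("r" rather than "er"); keeping the value
 * in a register side-steps the 64-bit immediate encoding restrictions
 * entirely.
 */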
    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("andq", v, original, result);
        return static_cast< storage_type >(result);
    }

    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("orq", v, original, result);
        return static_cast< storage_type >(result);
    }

    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        storage_type original = storage;
        storage_type result;
        BOOST_ATOMIC_DETAIL_CAS_LOOP("xorq", v, original, result);
        return static_cast< storage_type >(result);
    }

#undef BOOST_ATOMIC_DETAIL_CAS_LOOP

    static BOOST_FORCEINLINE bool negate_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!negate(storage, order);
    }

    static BOOST_FORCEINLINE bool complement_and_test(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return !!bitwise_complement(storage, order);
    }

    static BOOST_FORCEINLINE void opaque_add(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incq %[storage]\n\t"
                : [storage] "+m" (storage)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addq %[argument], %[storage]\n\t"
                : [storage] "+m" (storage)
                : [argument] "er" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
    }

    static BOOST_FORCEINLINE void opaque_sub(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decq %[storage]\n\t"
                : [storage] "+m" (storage)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subq %[argument], %[storage]\n\t"
                : [storage] "+m" (storage)
                : [argument] "er" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
    }

    static BOOST_FORCEINLINE void opaque_negate(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; negq %[storage]\n\t"
            : [storage] "+m" (storage)
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    static BOOST_FORCEINLINE void opaque_and(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; andq %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "er" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    static BOOST_FORCEINLINE void opaque_or(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; orq %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "er" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    static BOOST_FORCEINLINE void opaque_xor(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; xorq %[argument], %[storage]\n\t"
            : [storage] "+m" (storage)
            : [argument] "er" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
    }

    static BOOST_FORCEINLINE void opaque_complement(storage_type volatile& storage, memory_order) BOOST_NOEXCEPT
    {
        __asm__ __volatile__
        (
            "lock; notq %[storage]\n\t"
            : [storage] "+m" (storage)
            :
            : "memory"
        );
    }

    static BOOST_FORCEINLINE bool add_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incq %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                :
                : "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addq %[argument], %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                : [argument] "er" (v)
                : "memory"
            );
        }
#else
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; incq %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; addq %[argument], %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                : [argument] "er" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
#endif
        return res;
    }

    static BOOST_FORCEINLINE bool sub_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decq %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                :
                : "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subq %[argument], %[storage]\n\t"
                : [storage] "+m" (storage), [result] "=@ccnz" (res)
                : [argument] "er" (v)
                : "memory"
            );
        }
#else
        if (BOOST_ATOMIC_DETAIL_IS_CONSTANT(v) && v == 1)
        {
            __asm__ __volatile__
            (
                "lock; decq %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                :
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
        else
        {
            __asm__ __volatile__
            (
                "lock; subq %[argument], %[storage]\n\t"
                "setnz %[result]\n\t"
                : [storage] "+m" (storage), [result] "=q" (res)
                : [argument] "er" (v)
                : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
            );
        }
#endif
        return res;
    }

    static BOOST_FORCEINLINE bool and_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; andq %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "er" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; andq %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "er" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }

    static BOOST_FORCEINLINE bool or_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; orq %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "er" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; orq %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "er" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }

    static BOOST_FORCEINLINE bool xor_and_test(storage_type volatile& storage, storage_type v, memory_order) BOOST_NOEXCEPT
    {
        bool res;
#if defined(BOOST_ATOMIC_DETAIL_ASM_HAS_FLAG_OUTPUTS)
        __asm__ __volatile__
        (
            "lock; xorq %[argument], %[storage]\n\t"
            : [storage] "+m" (storage), [result] "=@ccnz" (res)
            : [argument] "er" (v)
            : "memory"
        );
#else
        __asm__ __volatile__
        (
            "lock; xorq %[argument], %[storage]\n\t"
            "setnz %[result]\n\t"
            : [storage] "+m" (storage), [result] "=q" (res)
            : [argument] "er" (v)
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"
        );
#endif
        return res;
    }
};

#endif // defined(__x86_64__)

} // namespace detail
} // namespace atomics
} // namespace boost

#endif // BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_X86_HPP_INCLUDED_