// extra_ops_gcc_arm.hpp
/*
 * Distributed under the Boost Software License, Version 1.0.
 * (See accompanying file LICENSE_1_0.txt or copy at
 * http://www.boost.org/LICENSE_1_0.txt)
 *
 * Copyright (c) 2017-2025 Andrey Semashev
 */
/*!
 * \file atomic/detail/extra_ops_gcc_arm.hpp
 *
 * This header contains implementation of the extra atomic operations for ARM.
 */
  13. #ifndef BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_ARM_HPP_INCLUDED_
  14. #define BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_ARM_HPP_INCLUDED_
  15. #include <cstddef>
  16. #include <cstdint>
  17. #include <boost/memory_order.hpp>
  18. #include <boost/atomic/detail/config.hpp>
  19. #include <boost/atomic/detail/platform.hpp>
  20. #include <boost/atomic/detail/storage_traits.hpp>
  21. #include <boost/atomic/detail/extra_operations_fwd.hpp>
  22. #include <boost/atomic/detail/extra_ops_generic.hpp>
  23. #include <boost/atomic/detail/ops_gcc_arm_common.hpp>
  24. #include <boost/atomic/detail/gcc_arm_asm_common.hpp>
  25. #include <boost/atomic/detail/capabilities.hpp>
  26. #include <boost/atomic/detail/header.hpp>
  27. #ifdef BOOST_HAS_PRAGMA_ONCE
  28. #pragma once
  29. #endif
  30. namespace boost {
  31. namespace atomics {
  32. namespace detail {
  33. template< typename Base >
  34. struct extra_operations_gcc_arm_common :
  35. public Base
  36. {
  37. using base_type = Base;
  38. using storage_type = typename base_type::storage_type;
  39. static BOOST_FORCEINLINE void opaque_negate(storage_type volatile& storage, memory_order order) noexcept
  40. {
  41. base_type::fetch_negate(storage, order);
  42. }
  43. static BOOST_FORCEINLINE void opaque_complement(storage_type volatile& storage, memory_order order) noexcept
  44. {
  45. base_type::fetch_complement(storage, order);
  46. }
  47. static BOOST_FORCEINLINE bool negate_and_test(storage_type volatile& storage, memory_order order) noexcept
  48. {
  49. return !!base_type::negate(storage, order);
  50. }
  51. static BOOST_FORCEINLINE bool add_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept
  52. {
  53. return !!base_type::add(storage, v, order);
  54. }
  55. static BOOST_FORCEINLINE bool sub_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept
  56. {
  57. return !!base_type::sub(storage, v, order);
  58. }
  59. static BOOST_FORCEINLINE bool and_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept
  60. {
  61. return !!base_type::bitwise_and(storage, v, order);
  62. }
  63. static BOOST_FORCEINLINE bool or_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept
  64. {
  65. return !!base_type::bitwise_or(storage, v, order);
  66. }
  67. static BOOST_FORCEINLINE bool xor_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept
  68. {
  69. return !!base_type::bitwise_xor(storage, v, order);
  70. }
  71. static BOOST_FORCEINLINE bool complement_and_test(storage_type volatile& storage, memory_order order) noexcept
  72. {
  73. return !!base_type::bitwise_complement(storage, order);
  74. }
  75. };
//! Size-dispatching template for the ARM extra operations; specialized below for each supported storage size.
template< typename Base, std::size_t Size, bool Signed >
struct extra_operations_gcc_arm;
  78. #if defined(BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXB_STREXB)
/*!
 * Extra operations for 8-bit storage, each implemented as a LDREXB/STREXB
 * retry loop. The value is manipulated in a 32-bit register
 * (\c extended_storage_type); LDREXB zero-extends the loaded byte and the
 * result is truncated back to \c storage_type on return.
 */
template< typename Base, bool Signed >
struct extra_operations_gcc_arm< Base, 1u, Signed > :
    public extra_operations_generic< Base, 1u, Signed >
{
    using base_type = extra_operations_generic< Base, 1u, Signed >;
    using storage_type = typename base_type::storage_type;
    // 32-bit register-sized type used to hold the zero-extended byte inside the asm blocks
    using extended_storage_type = typename storage_traits< 4u >::type;

    //! Atomically negates the stored value; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) noexcept
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        std::uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexb %[original], %[storage]\n\t" // original = zero_extend(*(&storage))
            "rsb %[result], %[original], #0\n\t" // result = 0 - original
            "strexb %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(original);
    }

    //! Atomically negates the stored value; returns the new value.
    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) noexcept
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        std::uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexb %[original], %[storage]\n\t" // original = zero_extend(*(&storage))
            "rsb %[result], %[original], #0\n\t" // result = 0 - original
            "strexb %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically adds \a v to the stored value; returns the new value.
    static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        std::uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexb %[original], %[storage]\n\t" // original = zero_extend(*(&storage))
            "add %[result], %[original], %[value]\n\t" // result = original + value
            "strexb %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically subtracts \a v from the stored value; returns the new value.
    static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        std::uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexb %[original], %[storage]\n\t" // original = zero_extend(*(&storage))
            "sub %[result], %[original], %[value]\n\t" // result = original - value
            "strexb %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically ANDs \a v into the stored value; returns the new value.
    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        std::uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexb %[original], %[storage]\n\t" // original = zero_extend(*(&storage))
            "and %[result], %[original], %[value]\n\t" // result = original & value
            "strexb %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically ORs \a v into the stored value; returns the new value.
    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        std::uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexb %[original], %[storage]\n\t" // original = zero_extend(*(&storage))
            "orr %[result], %[original], %[value]\n\t" // result = original | value
            "strexb %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically XORs \a v into the stored value; returns the new value.
    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        std::uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexb %[original], %[storage]\n\t" // original = zero_extend(*(&storage))
            "eor %[result], %[original], %[value]\n\t" // result = original ^ value
            "strexb %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically bitwise-complements the stored value; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) noexcept
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        std::uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexb %[original], %[storage]\n\t" // original = zero_extend(*(&storage))
            "mvn %[result], %[original]\n\t" // result = NOT original
            "strexb %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(original);
    }

    //! Atomically bitwise-complements the stored value; returns the new value.
    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) noexcept
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        std::uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexb %[original], %[storage]\n\t" // original = zero_extend(*(&storage))
            "mvn %[result], %[original]\n\t" // result = NOT original
            "strexb %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }
};
//! Plugs the 8-bit ARM implementation, wrapped with the common opaque/and-test mixin, into the extra_operations dispatch.
template< typename Base, bool Signed >
struct extra_operations< Base, 1u, Signed, true > :
    public extra_operations_gcc_arm_common< extra_operations_gcc_arm< Base, 1u, Signed > >
{
};
  317. #endif // defined(BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXB_STREXB)
  318. #if defined(BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXH_STREXH)
/*!
 * Extra operations for 16-bit storage, each implemented as a LDREXH/STREXH
 * retry loop. The value is manipulated in a 32-bit register
 * (\c extended_storage_type); LDREXH zero-extends the loaded halfword and the
 * result is truncated back to \c storage_type on return.
 */
template< typename Base, bool Signed >
struct extra_operations_gcc_arm< Base, 2u, Signed > :
    public extra_operations_generic< Base, 2u, Signed >
{
    using base_type = extra_operations_generic< Base, 2u, Signed >;
    using storage_type = typename base_type::storage_type;
    // 32-bit register-sized type used to hold the zero-extended halfword inside the asm blocks
    using extended_storage_type = typename storage_traits< 4u >::type;

    //! Atomically negates the stored value; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) noexcept
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        std::uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexh %[original], %[storage]\n\t" // original = zero_extend(*(&storage))
            "rsb %[result], %[original], #0\n\t" // result = 0 - original
            "strexh %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(original);
    }

    //! Atomically negates the stored value; returns the new value.
    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) noexcept
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        std::uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexh %[original], %[storage]\n\t" // original = zero_extend(*(&storage))
            "rsb %[result], %[original], #0\n\t" // result = 0 - original
            "strexh %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically adds \a v to the stored value; returns the new value.
    static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        std::uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexh %[original], %[storage]\n\t" // original = zero_extend(*(&storage))
            "add %[result], %[original], %[value]\n\t" // result = original + value
            "strexh %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically subtracts \a v from the stored value; returns the new value.
    static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        std::uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexh %[original], %[storage]\n\t" // original = zero_extend(*(&storage))
            "sub %[result], %[original], %[value]\n\t" // result = original - value
            "strexh %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically ANDs \a v into the stored value; returns the new value.
    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        std::uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexh %[original], %[storage]\n\t" // original = zero_extend(*(&storage))
            "and %[result], %[original], %[value]\n\t" // result = original & value
            "strexh %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically ORs \a v into the stored value; returns the new value.
    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        std::uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexh %[original], %[storage]\n\t" // original = zero_extend(*(&storage))
            "orr %[result], %[original], %[value]\n\t" // result = original | value
            "strexh %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically XORs \a v into the stored value; returns the new value.
    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        std::uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexh %[original], %[storage]\n\t" // original = zero_extend(*(&storage))
            "eor %[result], %[original], %[value]\n\t" // result = original ^ value
            "strexh %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }

    //! Atomically bitwise-complements the stored value; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) noexcept
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        std::uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexh %[original], %[storage]\n\t" // original = zero_extend(*(&storage))
            "mvn %[result], %[original]\n\t" // result = NOT original
            "strexh %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(original);
    }

    //! Atomically bitwise-complements the stored value; returns the new value.
    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) noexcept
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        std::uint32_t tmp;
        extended_storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrexh %[original], %[storage]\n\t" // original = zero_extend(*(&storage))
            "mvn %[result], %[original]\n\t" // result = NOT original
            "strexh %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return static_cast< storage_type >(result);
    }
};
//! Plugs the 16-bit ARM implementation, wrapped with the common opaque/and-test mixin, into the extra_operations dispatch.
template< typename Base, bool Signed >
struct extra_operations< Base, 2u, Signed, true > :
    public extra_operations_gcc_arm_common< extra_operations_gcc_arm< Base, 2u, Signed > >
{
};
  557. #endif // defined(BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXH_STREXH)
  558. template< typename Base, bool Signed >
  559. struct extra_operations_gcc_arm< Base, 4u, Signed > :
  560. public extra_operations_generic< Base, 4u, Signed >
  561. {
  562. using base_type = extra_operations_generic< Base, 4u, Signed >;
  563. using storage_type = typename base_type::storage_type;
    //! Atomically negates the 32-bit stored value via a LDREX/STREX retry loop; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) noexcept
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        std::uint32_t tmp;
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrex %[original], %[storage]\n\t" // original = *(&storage)
            "rsb %[result], %[original], #0\n\t" // result = 0 - original
            "strex %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return original;
    }
    //! Atomically negates the 32-bit stored value via a LDREX/STREX retry loop; returns the new value.
    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) noexcept
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        std::uint32_t tmp;
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrex %[original], %[storage]\n\t" // original = *(&storage)
            "rsb %[result], %[original], #0\n\t" // result = 0 - original
            "strex %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return result;
    }
    //! Atomically adds \a v to the 32-bit stored value via a LDREX/STREX retry loop; returns the new value.
    static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        std::uint32_t tmp;
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrex %[original], %[storage]\n\t" // original = *(&storage)
            "add %[result], %[original], %[value]\n\t" // result = original + value
            "strex %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return result;
    }
    //! Atomically subtracts \a v from the 32-bit stored value via a LDREX/STREX retry loop; returns the new value.
    static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        std::uint32_t tmp;
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrex %[original], %[storage]\n\t" // original = *(&storage)
            "sub %[result], %[original], %[value]\n\t" // result = original - value
            "strex %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return result;
    }
    // Atomically ANDs v into the value in storage and returns the NEW (post-AND) value.
    // LDREX/STREX retry loop with memory_order barriers from fence_before/fence_after.
    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        std::uint32_t tmp; // STREX status flag; "l" constraint keeps it in a Thumb low register
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrex %[original], %[storage]\n\t" // original = *(&storage)
            "and %[result], %[original], %[value]\n\t" // result = original & value
            "strex %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4 - immediate if encodable, otherwise a register
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return result;
    }
    // Atomically ORs v into the value in storage and returns the NEW (post-OR) value.
    // LDREX/STREX retry loop with memory_order barriers from fence_before/fence_after.
    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        std::uint32_t tmp; // STREX status flag; "l" constraint keeps it in a Thumb low register
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrex %[original], %[storage]\n\t" // original = *(&storage)
            "orr %[result], %[original], %[value]\n\t" // result = original | value
            "strex %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4 - immediate if encodable, otherwise a register
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return result;
    }
    // Atomically XORs v into the value in storage and returns the NEW (post-XOR) value.
    // LDREX/STREX retry loop with memory_order barriers from fence_before/fence_after.
    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        std::uint32_t tmp; // STREX status flag; "l" constraint keeps it in a Thumb low register
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrex %[original], %[storage]\n\t" // original = *(&storage)
            "eor %[result], %[original], %[value]\n\t" // result = original ^ value
            "strex %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            : [value] "Ir" (v) // %4 - immediate if encodable, otherwise a register
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return result;
    }
    // Atomically replaces the value in storage with its bitwise complement (~x)
    // and returns the ORIGINAL (pre-complement) value - this is the fetch-form.
    // LDREX/STREX retry loop with memory_order barriers from fence_before/fence_after.
    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) noexcept
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        std::uint32_t tmp; // STREX status flag; "l" constraint keeps it in a Thumb low register
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrex %[original], %[storage]\n\t" // original = *(&storage)
            "mvn %[result], %[original]\n\t" // result = NOT original
            "strex %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return original; // fetch-form: the value observed before the complement
    }
    // Atomically replaces the value in storage with its bitwise complement (~x)
    // and returns the NEW (complemented) value - contrast with fetch_complement,
    // which performs the same update but returns the original value.
    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) noexcept
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        std::uint32_t tmp; // STREX status flag; "l" constraint keeps it in a Thumb low register
        storage_type original, result;
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%[tmp])
            "1:\n\t"
            "ldrex %[original], %[storage]\n\t" // original = *(&storage)
            "mvn %[result], %[original]\n\t" // result = NOT original
            "strex %[tmp], %[result], %[storage]\n\t" // *(&storage) = result, tmp = store failed
            "teq %[tmp], #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%[tmp])
            : [original] "=&r" (original), // %0
              [result] "=&r" (result), // %1
              [tmp] "=&l" (tmp), // %2
              [storage] "+Q" (storage) // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return result; // value-form: the complemented value that was stored
    }
  789. };
// Selects the ARM LDREX/STREX-based implementation for 4-byte storage.
// extra_operations_gcc_arm_common (defined earlier in this file) layers the
// derived/convenience operations on top of the primitives provided by
// extra_operations_gcc_arm< Base, 4u, Signed >.
template< typename Base, bool Signed >
struct extra_operations< Base, 4u, Signed, true > :
    public extra_operations_gcc_arm_common< extra_operations_gcc_arm< Base, 4u, Signed > >
{
};
  795. #if defined(BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXD_STREXD)
// Extra operations for 8-byte storage, implemented with the LDREXD/STREXD
// doubleword exclusive-access instructions (only compiled when the target
// supports them - see the surrounding BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXD_STREXD
// guard). A 64-bit operand occupies a register pair; in the asm templates %N
// names the low word and %HN the high word of operand N (GCC template
// modifier), while the BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_LO/HI macros select the
// word that holds the arithmetically low/high half (presumably endian-aware -
// defined elsewhere in Boost.Atomic). All operations follow the same pattern:
// fence_before(order), an LDREXD/compute/STREXD retry loop, fence_after(order).
template< typename Base, bool Signed >
struct extra_operations_gcc_arm< Base, 8u, Signed > :
    public extra_operations_generic< Base, 8u, Signed >
{
    using base_type = extra_operations_generic< Base, 8u, Signed >;
    using storage_type = typename base_type::storage_type;

    // Atomically negates the value in storage (two's complement: NOT x, then +1
    // with carry propagated into the high word) and returns the ORIGINAL value.
    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) noexcept
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        storage_type original, result;
        std::uint32_t tmp; // STREXD status flag
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n\t"
            "ldrexd %1, %H1, %3\n\t" // original = *(&storage)
            "mvn %2, %1\n\t" // result = NOT original
            "mvn %H2, %H1\n\t"
            "adds " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_LO(2) ", " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_LO(2) ", #1\n\t" // result = result + 1
            "adc " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_HI(2) ", " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_HI(2) ", #0\n\t"
            "strexd %0, %2, %H2, %3\n\t" // *(&storage) = result, tmp = store failed
            "teq %0, #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original), // %1
              "=&r" (result), // %2
              "+Q" (storage) // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return original; // fetch-form: the value observed before negation
    }

    // Atomically negates the value in storage (same NOT+1 sequence as
    // fetch_negate) but returns the NEW (negated) value.
    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) noexcept
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        storage_type original, result;
        std::uint32_t tmp; // STREXD status flag
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n\t"
            "ldrexd %1, %H1, %3\n\t" // original = *(&storage)
            "mvn %2, %1\n\t" // result = NOT original
            "mvn %H2, %H1\n\t"
            "adds " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_LO(2) ", " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_LO(2) ", #1\n\t" // result = result + 1
            "adc " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_HI(2) ", " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_HI(2) ", #0\n\t"
            "strexd %0, %2, %H2, %3\n\t" // *(&storage) = result, tmp = store failed
            "teq %0, #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original), // %1
              "=&r" (result), // %2
              "+Q" (storage) // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return result; // value-form: the negated value that was stored
    }

    // Atomically adds v and returns the NEW value. The 64-bit sum is formed as
    // two 32-bit additions: ADDS on the low words sets the carry flag, ADC
    // folds it into the high words.
    static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        storage_type original, result;
        std::uint32_t tmp; // STREXD status flag
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n\t"
            "ldrexd %1, %H1, %3\n\t" // original = *(&storage)
            "adds " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_LO(2) ", " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_LO(1) ", " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_LO(4) "\n\t" // result = original + value
            "adc " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_HI(2) ", " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_HI(1) ", " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_HI(4) "\n\t"
            "strexd %0, %2, %H2, %3\n\t" // *(&storage) = result, tmp = store failed
            "teq %0, #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original), // %1
              "=&r" (result), // %2
              "+Q" (storage) // %3
            : "r" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return result;
    }

    // Atomically subtracts v and returns the NEW value. The 64-bit difference
    // is formed as SUBS on the low words (sets borrow via carry flag) followed
    // by SBC on the high words.
    static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        storage_type original, result;
        std::uint32_t tmp; // STREXD status flag
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n\t"
            "ldrexd %1, %H1, %3\n\t" // original = *(&storage)
            "subs " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_LO(2) ", " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_LO(1) ", " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_LO(4) "\n\t" // result = original - value
            "sbc " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_HI(2) ", " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_HI(1) ", " BOOST_ATOMIC_DETAIL_ARM_ASM_ARG_HI(4) "\n\t"
            "strexd %0, %2, %H2, %3\n\t" // *(&storage) = result, tmp = store failed
            "teq %0, #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original), // %1
              "=&r" (result), // %2
              "+Q" (storage) // %3
            : "r" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return result;
    }

    // Atomically ANDs v into storage and returns the NEW value. Bitwise ops
    // have no cross-word carry, so the two halves are combined independently.
    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        storage_type original, result;
        std::uint32_t tmp; // STREXD status flag
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n\t"
            "ldrexd %1, %H1, %3\n\t" // original = *(&storage)
            "and %2, %1, %4\n\t" // result = original & value
            "and %H2, %H1, %H4\n\t"
            "strexd %0, %2, %H2, %3\n\t" // *(&storage) = result, tmp = store failed
            "teq %0, #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original), // %1
              "=&r" (result), // %2
              "+Q" (storage) // %3
            : "r" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return result;
    }

    // Atomically ORs v into storage and returns the NEW value.
    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        storage_type original, result;
        std::uint32_t tmp; // STREXD status flag
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n\t"
            "ldrexd %1, %H1, %3\n\t" // original = *(&storage)
            "orr %2, %1, %4\n\t" // result = original | value
            "orr %H2, %H1, %H4\n\t"
            "strexd %0, %2, %H2, %3\n\t" // *(&storage) = result, tmp = store failed
            "teq %0, #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original), // %1
              "=&r" (result), // %2
              "+Q" (storage) // %3
            : "r" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return result;
    }

    // Atomically XORs v into storage and returns the NEW value.
    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        storage_type original, result;
        std::uint32_t tmp; // STREXD status flag
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n\t"
            "ldrexd %1, %H1, %3\n\t" // original = *(&storage)
            "eor %2, %1, %4\n\t" // result = original ^ value
            "eor %H2, %H1, %H4\n\t"
            "strexd %0, %2, %H2, %3\n\t" // *(&storage) = result, tmp = store failed
            "teq %0, #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original), // %1
              "=&r" (result), // %2
              "+Q" (storage) // %3
            : "r" (v) // %4
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return result;
    }

    // Atomically replaces storage with its bitwise complement and returns the
    // ORIGINAL value (fetch-form).
    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) noexcept
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        storage_type original, result;
        std::uint32_t tmp; // STREXD status flag
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n\t"
            "ldrexd %1, %H1, %3\n\t" // original = *(&storage)
            "mvn %2, %1\n\t" // result = NOT original
            "mvn %H2, %H1\n\t"
            "strexd %0, %2, %H2, %3\n\t" // *(&storage) = result, tmp = store failed
            "teq %0, #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original), // %1
              "=&r" (result), // %2
              "+Q" (storage) // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return original; // fetch-form: the value observed before the complement
    }

    // Atomically replaces storage with its bitwise complement and returns the
    // NEW (complemented) value (value-form).
    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) noexcept
    {
        core_arch_operations_gcc_arm_base::fence_before(order);
        storage_type original, result;
        std::uint32_t tmp; // STREXD status flag
        __asm__ __volatile__
        (
            BOOST_ATOMIC_DETAIL_ARM_ASM_START(%0)
            "1:\n\t"
            "ldrexd %1, %H1, %3\n\t" // original = *(&storage)
            "mvn %2, %1\n\t" // result = NOT original
            "mvn %H2, %H1\n\t"
            "strexd %0, %2, %H2, %3\n\t" // *(&storage) = result, tmp = store failed
            "teq %0, #0\n\t" // flags = tmp==0
            "bne 1b\n\t" // if (!flags.equal) goto retry
            BOOST_ATOMIC_DETAIL_ARM_ASM_END(%0)
            : BOOST_ATOMIC_DETAIL_ARM_ASM_TMPREG_CONSTRAINT(tmp), // %0
              "=&r" (original), // %1
              "=&r" (result), // %2
              "+Q" (storage) // %3
            :
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC
        );
        core_arch_operations_gcc_arm_base::fence_after(order);
        return result; // value-form: the complemented value that was stored
    }
};
// Selects the ARM LDREXD/STREXD-based implementation for 8-byte storage.
// extra_operations_gcc_arm_common (defined earlier in this file) layers the
// derived/convenience operations on top of the primitives provided by
// extra_operations_gcc_arm< Base, 8u, Signed >.
template< typename Base, bool Signed >
struct extra_operations< Base, 8u, Signed, true > :
    public extra_operations_gcc_arm_common< extra_operations_gcc_arm< Base, 8u, Signed > >
{
};
  1046. #endif // defined(BOOST_ATOMIC_DETAIL_ARM_HAS_LDREXD_STREXD)
  1047. } // namespace detail
  1048. } // namespace atomics
  1049. } // namespace boost
  1050. #include <boost/atomic/detail/footer.hpp>
  1051. #endif // BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_ARM_HPP_INCLUDED_