// extra_ops_gcc_aarch64.hpp
  1. /*
  2. * Distributed under the Boost Software License, Version 1.0.
  3. * (See accompanying file LICENSE_1_0.txt or copy at
  4. * http://www.boost.org/LICENSE_1_0.txt)
  5. *
  6. * Copyright (c) 2020-2025 Andrey Semashev
  7. */
  8. /*!
  9. * \file atomic/detail/extra_ops_gcc_aarch64.hpp
  10. *
  11. * This header contains implementation of the extra atomic operations for AArch64.
  12. */
  13. #ifndef BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_AARCH64_HPP_INCLUDED_
  14. #define BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_AARCH64_HPP_INCLUDED_
  15. #include <cstddef>
  16. #include <cstdint>
  17. #include <boost/memory_order.hpp>
  18. #include <boost/atomic/detail/config.hpp>
  19. #include <boost/atomic/detail/platform.hpp>
  20. #include <boost/atomic/detail/storage_traits.hpp>
  21. #include <boost/atomic/detail/extra_operations_fwd.hpp>
  22. #include <boost/atomic/detail/extra_ops_generic.hpp>
  23. #include <boost/atomic/detail/ops_gcc_aarch64_common.hpp>
  24. #include <boost/atomic/detail/capabilities.hpp>
  25. #include <boost/atomic/detail/header.hpp>
  26. #ifdef BOOST_HAS_PRAGMA_ONCE
  27. #pragma once
  28. #endif
  29. namespace boost {
  30. namespace atomics {
  31. namespace detail {
  32. template< typename Base >
  33. struct extra_operations_gcc_aarch64_common :
  34. public Base
  35. {
  36. using base_type = Base;
  37. using storage_type = typename base_type::storage_type;
  38. // Note: For opaque operations prefer operations returning the resulting values instead of the original values
  39. // as these operations require less registers. That is unless LSE is available, in which case
  40. // it is better to use the dedicated atomic instructions. The LSE check is done in the base_type,
  41. // where needed (e.g. for 128-bit operations there are no LSE instructions).
  42. static BOOST_FORCEINLINE void opaque_negate(storage_type volatile& storage, memory_order order) noexcept
  43. {
  44. base_type::negate(storage, order);
  45. }
  46. static BOOST_FORCEINLINE void opaque_complement(storage_type volatile& storage, memory_order order) noexcept
  47. {
  48. base_type::bitwise_complement(storage, order);
  49. }
  50. static BOOST_FORCEINLINE void opaque_add(storage_type volatile& storage, storage_type v, memory_order order) noexcept
  51. {
  52. base_type::add(storage, v, order);
  53. }
  54. static BOOST_FORCEINLINE void opaque_sub(storage_type volatile& storage, storage_type v, memory_order order) noexcept
  55. {
  56. base_type::sub(storage, v, order);
  57. }
  58. static BOOST_FORCEINLINE void opaque_and(storage_type volatile& storage, storage_type v, memory_order order) noexcept
  59. {
  60. base_type::bitwise_and(storage, v, order);
  61. }
  62. static BOOST_FORCEINLINE void opaque_or(storage_type volatile& storage, storage_type v, memory_order order) noexcept
  63. {
  64. base_type::bitwise_or(storage, v, order);
  65. }
  66. static BOOST_FORCEINLINE void opaque_xor(storage_type volatile& storage, storage_type v, memory_order order) noexcept
  67. {
  68. base_type::bitwise_xor(storage, v, order);
  69. }
  70. static BOOST_FORCEINLINE bool negate_and_test(storage_type volatile& storage, memory_order order) noexcept
  71. {
  72. return !!base_type::negate(storage, order);
  73. }
  74. static BOOST_FORCEINLINE bool add_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept
  75. {
  76. return !!base_type::add(storage, v, order);
  77. }
  78. static BOOST_FORCEINLINE bool sub_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept
  79. {
  80. return !!base_type::sub(storage, v, order);
  81. }
  82. static BOOST_FORCEINLINE bool and_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept
  83. {
  84. return !!base_type::bitwise_and(storage, v, order);
  85. }
  86. static BOOST_FORCEINLINE bool or_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept
  87. {
  88. return !!base_type::bitwise_or(storage, v, order);
  89. }
  90. static BOOST_FORCEINLINE bool xor_and_test(storage_type volatile& storage, storage_type v, memory_order order) noexcept
  91. {
  92. return !!base_type::bitwise_xor(storage, v, order);
  93. }
  94. static BOOST_FORCEINLINE bool complement_and_test(storage_type volatile& storage, memory_order order) noexcept
  95. {
  96. return !!base_type::bitwise_complement(storage, order);
  97. }
  98. };
// Primary template; specialized below for each supported storage size.
template< typename Base, std::size_t Size, bool Signed >
struct extra_operations_gcc_aarch64;
// 8-bit (byte) specialization.
//
// Each operation below is a load-exclusive/store-exclusive (LL/SC) loop:
// ld(a)xrb loads the current byte with exclusive access, the modification is
// applied in a register, and st(l)xrb attempts the store; [tmp] receives the
// store-exclusive status (0 on success), and cbnz retries from the load on
// failure. BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH (from
// ops_gcc_aarch64_common.hpp, included above) instantiates the locally-defined
// BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN macro with the acquire/release mnemonic
// suffixes appropriate for the requested memory_order. The TSAN release/acquire
// annotations make the hand-written asm visible to ThreadSanitizer.
template< typename Base, bool Signed >
struct extra_operations_gcc_aarch64< Base, 1u, Signed > :
    public extra_operations_generic< Base, 1u, Signed >
{
    using base_type = extra_operations_generic< Base, 1u, Signed >;
    using storage_type = typename base_type::storage_type;

    // Atomically negates the value; returns the value observed before negation.
    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type original, result;
        std::uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[original], %[storage]\n\t"\
            "neg %w[result], %w[original]\n\t"\
            "st" st_mo "xrb %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : \
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return original;
    }

    // Atomically negates the value; returns the resulting (negated) value.
    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type result;
        std::uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[result], %[storage]\n\t"\
            "neg %w[result], %w[result]\n\t"\
            "st" st_mo "xrb %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : \
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return result;
    }

#if !defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
    // Without LSE, provide new-value-returning arithmetic/bitwise ops directly;
    // with LSE the base_type versions (using dedicated atomic instructions) win.

    // Atomically adds v; returns the resulting value.
    // "Ir" allows v to be an add/sub-range immediate or a register.
    static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type result;
        std::uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[result], %[storage]\n\t"\
            "add %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xrb %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Ir" (v)\
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return result;
    }

    // Atomically subtracts v; returns the resulting value.
    static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type result;
        std::uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[result], %[storage]\n\t"\
            "sub %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xrb %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Ir" (v)\
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return result;
    }

    // Atomically ANDs with v; returns the resulting value.
    // "Kr" allows v to be a 32-bit logical immediate or a register.
    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type result;
        std::uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[result], %[storage]\n\t"\
            "and %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xrb %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Kr" (v)\
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return result;
    }

    // Atomically ORs with v; returns the resulting value.
    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type result;
        std::uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[result], %[storage]\n\t"\
            "orr %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xrb %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Kr" (v)\
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return result;
    }

    // Atomically XORs with v; returns the resulting value.
    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type result;
        std::uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[result], %[storage]\n\t"\
            "eor %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xrb %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Kr" (v)\
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return result;
    }

    // Atomically complements (bitwise NOT) the value; returns the value
    // observed before the operation.
    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type original, result;
        std::uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[original], %[storage]\n\t"\
            "mvn %w[result], %w[original]\n\t"\
            "st" st_mo "xrb %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : \
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return original;
    }

    // Atomically complements (bitwise NOT) the value; returns the resulting value.
    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type result;
        std::uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrb %w[result], %[storage]\n\t"\
            "mvn %w[result], %w[result]\n\t"\
            "st" st_mo "xrb %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : \
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return result;
    }
#endif // !defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
};
// Plug the 8-bit AArch64 implementation into the extra_operations facade,
// layering the common opaque_*/*_and_test adaptors on top.
template< typename Base, bool Signed >
struct extra_operations< Base, 1u, Signed, true > :
    public extra_operations_gcc_aarch64_common< extra_operations_gcc_aarch64< Base, 1u, Signed > >
{
};
// 16-bit (halfword) specialization. Identical in structure to the 8-bit
// specialization above, except the exclusive load/store instructions use the
// halfword suffix ("xrh" instead of "xrb"). See the 8-bit specialization for
// a description of the LL/SC loop and the MO_INSN/MO_SWITCH macro idiom.
template< typename Base, bool Signed >
struct extra_operations_gcc_aarch64< Base, 2u, Signed > :
    public extra_operations_generic< Base, 2u, Signed >
{
    using base_type = extra_operations_generic< Base, 2u, Signed >;
    using storage_type = typename base_type::storage_type;

    // Atomically negates the value; returns the value observed before negation.
    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type original, result;
        std::uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[original], %[storage]\n\t"\
            "neg %w[result], %w[original]\n\t"\
            "st" st_mo "xrh %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : \
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return original;
    }

    // Atomically negates the value; returns the resulting (negated) value.
    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type result;
        std::uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[result], %[storage]\n\t"\
            "neg %w[result], %w[result]\n\t"\
            "st" st_mo "xrh %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : \
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return result;
    }

#if !defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
    // Without LSE, provide new-value-returning arithmetic/bitwise ops directly;
    // with LSE the base_type versions (using dedicated atomic instructions) win.

    // Atomically adds v; returns the resulting value.
    // "Ir" allows v to be an add/sub-range immediate or a register.
    static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type result;
        std::uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[result], %[storage]\n\t"\
            "add %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xrh %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Ir" (v)\
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return result;
    }

    // Atomically subtracts v; returns the resulting value.
    static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type result;
        std::uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[result], %[storage]\n\t"\
            "sub %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xrh %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Ir" (v)\
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return result;
    }

    // Atomically ANDs with v; returns the resulting value.
    // "Kr" allows v to be a 32-bit logical immediate or a register.
    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type result;
        std::uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[result], %[storage]\n\t"\
            "and %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xrh %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Kr" (v)\
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return result;
    }

    // Atomically ORs with v; returns the resulting value.
    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type result;
        std::uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[result], %[storage]\n\t"\
            "orr %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xrh %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Kr" (v)\
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return result;
    }

    // Atomically XORs with v; returns the resulting value.
    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type result;
        std::uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[result], %[storage]\n\t"\
            "eor %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xrh %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Kr" (v)\
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return result;
    }

    // Atomically complements (bitwise NOT) the value; returns the value
    // observed before the operation.
    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type original, result;
        std::uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[original], %[storage]\n\t"\
            "mvn %w[result], %w[original]\n\t"\
            "st" st_mo "xrh %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : \
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return original;
    }

    // Atomically complements (bitwise NOT) the value; returns the resulting value.
    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type result;
        std::uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xrh %w[result], %[storage]\n\t"\
            "mvn %w[result], %w[result]\n\t"\
            "st" st_mo "xrh %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : \
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return result;
    }
#endif // !defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
};
// Plug the 16-bit AArch64 implementation into the extra_operations facade,
// layering the common opaque_*/*_and_test adaptors on top.
template< typename Base, bool Signed >
struct extra_operations< Base, 2u, Signed, true > :
    public extra_operations_gcc_aarch64_common< extra_operations_gcc_aarch64< Base, 2u, Signed > >
{
};
  525. template< typename Base, bool Signed >
  526. struct extra_operations_gcc_aarch64< Base, 4u, Signed > :
  527. public extra_operations_generic< Base, 4u, Signed >
  528. {
  529. using base_type = extra_operations_generic< Base, 4u, Signed >;
  530. using storage_type = typename base_type::storage_type;
    // Atomically negates the 32-bit value via a ld(a)xr/st(l)xr LL/SC loop
    // ([tmp] holds the store-exclusive status; cbnz retries on contention);
    // returns the value observed before negation. The MO_SWITCH macro (see
    // ops_gcc_aarch64_common.hpp) instantiates MO_INSN with acquire/release
    // mnemonic suffixes matching the requested memory_order.
    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type original, result;
        std::uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[original], %[storage]\n\t"\
            "neg %w[result], %w[original]\n\t"\
            "st" st_mo "xr %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : \
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return original;
    }
    // Atomically negates the 32-bit value; returns the resulting (negated)
    // value. Same LL/SC loop as fetch_negate, but the new value is computed
    // in place so one fewer register is needed.
    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type result;
        std::uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[result], %[storage]\n\t"\
            "neg %w[result], %w[result]\n\t"\
            "st" st_mo "xr %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : \
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return result;
    }
  575. #if !defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
    // Atomically adds v to the 32-bit value; returns the resulting value.
    // Only compiled without LSE; with LSE the base_type version is preferred.
    // "Ir" allows v to be an add/sub-range immediate or a register.
    static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type result;
        std::uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[result], %[storage]\n\t"\
            "add %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xr %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Ir" (v)\
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return result;
    }
    // Atomically subtracts v from the 32-bit value; returns the resulting value.
    static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type result;
        std::uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[result], %[storage]\n\t"\
            "sub %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xr %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Ir" (v)\
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return result;
    }
    // Atomically ANDs the 32-bit value with v; returns the resulting value.
    // "Kr" allows v to be a 32-bit logical immediate or a register.
    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type result;
        std::uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[result], %[storage]\n\t"\
            "and %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xr %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Kr" (v)\
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return result;
    }
    // Atomically ORs the 32-bit value with v; returns the resulting value.
    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type result;
        std::uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[result], %[storage]\n\t"\
            "orr %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xr %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Kr" (v)\
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return result;
    }
    //! Atomically performs storage ^= v and returns the NEW (post-operation) value.
    //! LL/SC retry loop; memory-order-specific instruction variants are selected by
    //! BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order).
    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); // ThreadSanitizer annotation (no-op unless instrumented)
        storage_type result;
        std::uint32_t tmp; // stxr status flag: 0 = store succeeded, non-zero = retry
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[result], %[storage]\n\t"\
            "eor %w[result], %w[result], %w[value]\n\t"\
            "st" st_mo "xr %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Kr" (v)\
            : "memory"\
        );
        /* "K": 32-bit logical-instruction immediate, or any register ("r") */
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return result;
    }
    //! Atomically replaces storage with ~storage and returns the ORIGINAL
    //! (pre-complement) value. LL/SC retry loop.
    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); // ThreadSanitizer annotation (no-op unless instrumented)
        storage_type original, result;
        std::uint32_t tmp; // stxr status flag: 0 = store succeeded, non-zero = retry
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[original], %[storage]\n\t"\
            "mvn %w[result], %w[original]\n\t"\
            "st" st_mo "xr %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : \
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return original;
    }
    //! Atomically replaces storage with ~storage and returns the NEW
    //! (complemented) value. LL/SC retry loop.
    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); // ThreadSanitizer annotation (no-op unless instrumented)
        storage_type result;
        std::uint32_t tmp; // stxr status flag: 0 = store succeeded, non-zero = retry
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %w[result], %[storage]\n\t"\
            "mvn %w[result], %w[result]\n\t"\
            "st" st_mo "xr %w[tmp], %w[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : \
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return result;
    }
  730. #endif // !defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
  731. };
//! Dispatch hook: routes 4-byte extra operations to the AArch64 backend,
//! wrapped in extra_operations_gcc_aarch64_common.
template< typename Base, bool Signed >
struct extra_operations< Base, 4u, Signed, true > :
    public extra_operations_gcc_aarch64_common< extra_operations_gcc_aarch64< Base, 4u, Signed > >
{
};
//! Extra operations for 8-byte (64-bit) storage, implemented as AArch64
//! exclusive load/store (ldxr/stxr) retry loops. Each asm body is expanded by
//! BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order), which instantiates
//! BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN with the ld/st mnemonic infixes
//! appropriate for the requested memory order. fetch_negate/negate are always
//! provided here; the remaining ops are only defined when LSE atomics are not
//! available (the LSE path is presumably provided elsewhere — defined outside
//! this view).
template< typename Base, bool Signed >
struct extra_operations_gcc_aarch64< Base, 8u, Signed > :
    public extra_operations_generic< Base, 8u, Signed >
{
    using base_type = extra_operations_generic< Base, 8u, Signed >;
    using storage_type = typename base_type::storage_type;

    //! Atomically replaces storage with -storage; returns the ORIGINAL value.
    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); // ThreadSanitizer annotation (no-op unless instrumented)
        storage_type original, result;
        std::uint32_t tmp; // stxr status flag: 0 = store succeeded, non-zero = retry
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %x[original], %[storage]\n\t"\
            "neg %x[result], %x[original]\n\t"\
            "st" st_mo "xr %w[tmp], %x[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : \
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return original;
    }

    //! Atomically replaces storage with -storage; returns the NEW value.
    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type result;
        std::uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %x[result], %[storage]\n\t"\
            "neg %x[result], %x[result]\n\t"\
            "st" st_mo "xr %w[tmp], %x[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : \
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return result;
    }

#if !defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
    //! Atomically performs storage += v; returns the NEW value.
    //! "I": add/sub-instruction immediate, or any register ("r").
    static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type result;
        std::uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %x[result], %[storage]\n\t"\
            "add %x[result], %x[result], %x[value]\n\t"\
            "st" st_mo "xr %w[tmp], %x[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Ir" (v)\
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return result;
    }

    //! Atomically performs storage -= v; returns the NEW value.
    static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type result;
        std::uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %x[result], %[storage]\n\t"\
            "sub %x[result], %x[result], %x[value]\n\t"\
            "st" st_mo "xr %w[tmp], %x[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Ir" (v)\
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return result;
    }

    //! Atomically performs storage &= v; returns the NEW value.
    //! "L": 64-bit logical-instruction immediate, or any register ("r").
    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type result;
        std::uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %x[result], %[storage]\n\t"\
            "and %x[result], %x[result], %x[value]\n\t"\
            "st" st_mo "xr %w[tmp], %x[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Lr" (v)\
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return result;
    }

    //! Atomically performs storage |= v; returns the NEW value.
    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type result;
        std::uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %x[result], %[storage]\n\t"\
            "orr %x[result], %x[result], %x[value]\n\t"\
            "st" st_mo "xr %w[tmp], %x[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Lr" (v)\
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return result;
    }

    //! Atomically performs storage ^= v; returns the NEW value.
    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type result;
        std::uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %x[result], %[storage]\n\t"\
            "eor %x[result], %x[result], %x[value]\n\t"\
            "st" st_mo "xr %w[tmp], %x[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : [value] "Lr" (v)\
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return result;
    }

    //! Atomically replaces storage with ~storage; returns the ORIGINAL value.
    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type original, result;
        std::uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %x[original], %[storage]\n\t"\
            "mvn %x[result], %x[original]\n\t"\
            "st" st_mo "xr %w[tmp], %x[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [result] "=&r" (result), [storage] "+Q" (storage), [original] "=&r" (original)\
            : \
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return original;
    }

    //! Atomically replaces storage with ~storage; returns the NEW value.
    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_type result;
        std::uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xr %x[result], %[storage]\n\t"\
            "mvn %x[result], %x[result]\n\t"\
            "st" st_mo "xr %w[tmp], %x[result], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage), [result] "=&r" (result)\
            : \
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return result;
    }
#endif // !defined(BOOST_ATOMIC_DETAIL_AARCH64_HAS_LSE)
};
//! Dispatch hook: routes 8-byte extra operations to the AArch64 backend,
//! wrapped in extra_operations_gcc_aarch64_common.
template< typename Base, bool Signed >
struct extra_operations< Base, 8u, Signed, true > :
    public extra_operations_gcc_aarch64_common< extra_operations_gcc_aarch64< Base, 8u, Signed > >
{
};
//! Extra operations for 16-byte (128-bit) storage, implemented with AArch64
//! paired exclusive load/store (ldxp/stxp) retry loops. The 128-bit value is
//! handled as two 64-bit halves through storage_union::as_uint64.
//! BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO / _HI (defined outside this view)
//! select which union element holds the arithmetically low/high half —
//! presumably endianness-dependent; carry must propagate from LO to HI.
template< typename Base, bool Signed >
struct extra_operations_gcc_aarch64< Base, 16u, Signed > :
    public extra_operations_generic< Base, 16u, Signed >
{
    using base_type = extra_operations_generic< Base, 16u, Signed >;
    using storage_type = typename base_type::storage_type;
    using storage_union = typename base_type::storage_union;

    //! Atomically replaces storage with -storage; returns the ORIGINAL value.
    //! Two's complement negate over 128 bits: invert both halves, then add 1
    //! with carry propagation (adds/adc), hence the condition-code clobber.
    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order); // ThreadSanitizer annotation (no-op unless instrumented)
        storage_union original;
        storage_union result;
        std::uint32_t tmp; // stxp status flag: 0 = store succeeded, non-zero = retry
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xp %x[original_0], %x[original_1], %[storage]\n\t"\
            "mvn %x[result_0], %x[original_0]\n\t"\
            "mvn %x[result_1], %x[original_1]\n\t"\
            "adds %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "], %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "], #1\n\t"\
            "adc %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "], %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "], xzr\n\t"\
            "st" st_mo "xp %w[tmp], %x[result_0], %x[result_1], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage),\
              [original_0] "=&r" (original.as_uint64[0u]), [original_1] "=&r" (original.as_uint64[1u]),\
              [result_0] "=&r" (result.as_uint64[0u]), [result_1] "=&r" (result.as_uint64[1u])\
            : \
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return original.as_storage;
    }

    //! Atomically replaces storage with -storage; returns the NEW value.
    static BOOST_FORCEINLINE storage_type negate(storage_type volatile& storage, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_union result;
        std::uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xp %x[result_0], %x[result_1], %[storage]\n\t"\
            "mvn %x[result_0], %x[result_0]\n\t"\
            "mvn %x[result_1], %x[result_1]\n\t"\
            "adds %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "], %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "], #1\n\t"\
            "adc %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "], %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "], xzr\n\t"\
            "st" st_mo "xp %w[tmp], %x[result_0], %x[result_1], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage),\
              [result_0] "=&r" (result.as_uint64[0u]), [result_1] "=&r" (result.as_uint64[1u])\
            : \
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return result.as_storage;
    }

    //! Atomically performs storage += v; returns the NEW value.
    //! 128-bit add as adds (low) + adc (high, consuming the carry).
    static BOOST_FORCEINLINE storage_type add(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_union result;
        storage_union value = { v };
        std::uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xp %x[result_0], %x[result_1], %[storage]\n\t"\
            "adds %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "], %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "], %x[value_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "]\n\t"\
            "adc %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "], %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "], %x[value_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "]\n\t"\
            "st" st_mo "xp %w[tmp], %x[result_0], %x[result_1], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage),\
              [result_0] "=&r" (result.as_uint64[0u]), [result_1] "=&r" (result.as_uint64[1u])\
            : [value_0] "r" (value.as_uint64[0u]), [value_1] "r" (value.as_uint64[1u])\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return result.as_storage;
    }

    //! Atomically performs storage -= v; returns the NEW value.
    //! 128-bit subtract as subs (low) + sbc (high, consuming the borrow).
    static BOOST_FORCEINLINE storage_type sub(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_union result;
        storage_union value = { v };
        std::uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xp %x[result_0], %x[result_1], %[storage]\n\t"\
            "subs %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "], %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "], %x[value_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_LO "]\n\t"\
            "sbc %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "], %x[result_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "], %x[value_" BOOST_ATOMIC_DETAIL_AARCH64_ASM_ARG_HI "]\n\t"\
            "st" st_mo "xp %w[tmp], %x[result_0], %x[result_1], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage),\
              [result_0] "=&r" (result.as_uint64[0u]), [result_1] "=&r" (result.as_uint64[1u])\
            : [value_0] "r" (value.as_uint64[0u]), [value_1] "r" (value.as_uint64[1u])\
            : BOOST_ATOMIC_DETAIL_ASM_CLOBBER_CC_COMMA "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return result.as_storage;
    }

    //! Atomically performs storage &= v; returns the NEW value.
    //! Halves are independent for bitwise ops, so no flags are touched.
    //! "L": 64-bit logical-instruction immediate, or any register ("r").
    static BOOST_FORCEINLINE storage_type bitwise_and(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_union result;
        storage_union value = { v };
        std::uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xp %x[result_0], %x[result_1], %[storage]\n\t"\
            "and %x[result_0], %x[result_0], %x[value_0]\n\t"\
            "and %x[result_1], %x[result_1], %x[value_1]\n\t"\
            "st" st_mo "xp %w[tmp], %x[result_0], %x[result_1], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage),\
              [result_0] "=&r" (result.as_uint64[0u]), [result_1] "=&r" (result.as_uint64[1u])\
            : [value_0] "Lr" (value.as_uint64[0u]), [value_1] "Lr" (value.as_uint64[1u])\
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return result.as_storage;
    }

    //! Atomically performs storage |= v; returns the NEW value.
    static BOOST_FORCEINLINE storage_type bitwise_or(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_union result;
        storage_union value = { v };
        std::uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xp %x[result_0], %x[result_1], %[storage]\n\t"\
            "orr %x[result_0], %x[result_0], %x[value_0]\n\t"\
            "orr %x[result_1], %x[result_1], %x[value_1]\n\t"\
            "st" st_mo "xp %w[tmp], %x[result_0], %x[result_1], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage),\
              [result_0] "=&r" (result.as_uint64[0u]), [result_1] "=&r" (result.as_uint64[1u])\
            : [value_0] "Lr" (value.as_uint64[0u]), [value_1] "Lr" (value.as_uint64[1u])\
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return result.as_storage;
    }

    //! Atomically performs storage ^= v; returns the NEW value.
    static BOOST_FORCEINLINE storage_type bitwise_xor(storage_type volatile& storage, storage_type v, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_union result;
        storage_union value = { v };
        std::uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xp %x[result_0], %x[result_1], %[storage]\n\t"\
            "eor %x[result_0], %x[result_0], %x[value_0]\n\t"\
            "eor %x[result_1], %x[result_1], %x[value_1]\n\t"\
            "st" st_mo "xp %w[tmp], %x[result_0], %x[result_1], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage),\
              [result_0] "=&r" (result.as_uint64[0u]), [result_1] "=&r" (result.as_uint64[1u])\
            : [value_0] "Lr" (value.as_uint64[0u]), [value_1] "Lr" (value.as_uint64[1u])\
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return result.as_storage;
    }

    //! Atomically replaces storage with ~storage; returns the ORIGINAL value.
    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_union original;
        storage_union result;
        std::uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xp %x[original_0], %x[original_1], %[storage]\n\t"\
            "mvn %x[result_0], %x[original_0]\n\t"\
            "mvn %x[result_1], %x[original_1]\n\t"\
            "st" st_mo "xp %w[tmp], %x[result_0], %x[result_1], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage),\
              [original_0] "=&r" (original.as_uint64[0u]), [original_1] "=&r" (original.as_uint64[1u]),\
              [result_0] "=&r" (result.as_uint64[0u]), [result_1] "=&r" (result.as_uint64[1u])\
            : \
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return original.as_storage;
    }

    //! Atomically replaces storage with ~storage; returns the NEW value.
    static BOOST_FORCEINLINE storage_type bitwise_complement(storage_type volatile& storage, memory_order order) noexcept
    {
        BOOST_ATOMIC_DETAIL_TSAN_RELEASE(&storage, order);
        storage_union result;
        std::uint32_t tmp;
#define BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN(ld_mo, st_mo)\
        __asm__ __volatile__\
        (\
            "1:\n\t"\
            "ld" ld_mo "xp %x[result_0], %x[result_1], %[storage]\n\t"\
            "mvn %x[result_0], %x[result_0]\n\t"\
            "mvn %x[result_1], %x[result_1]\n\t"\
            "st" st_mo "xp %w[tmp], %x[result_0], %x[result_1], %[storage]\n\t"\
            "cbnz %w[tmp], 1b\n\t"\
            : [tmp] "=&r" (tmp), [storage] "+Q" (storage),\
              [result_0] "=&r" (result.as_uint64[0u]), [result_1] "=&r" (result.as_uint64[1u])\
            : \
            : "memory"\
        );
        BOOST_ATOMIC_DETAIL_AARCH64_MO_SWITCH(order)
#undef BOOST_ATOMIC_DETAIL_AARCH64_MO_INSN
        BOOST_ATOMIC_DETAIL_TSAN_ACQUIRE(&storage, order);
        return result.as_storage;
    }
};
//! Dispatch hook: routes 16-byte extra operations to the AArch64 backend,
//! wrapped in extra_operations_gcc_aarch64_common.
template< typename Base, bool Signed >
struct extra_operations< Base, 16u, Signed, true > :
    public extra_operations_gcc_aarch64_common< extra_operations_gcc_aarch64< Base, 16u, Signed > >
{
};
  1191. } // namespace detail
  1192. } // namespace atomics
  1193. } // namespace boost
  1194. #include <boost/atomic/detail/footer.hpp>
  1195. #endif // BOOST_ATOMIC_DETAIL_EXTRA_OPS_GCC_AARCH64_HPP_INCLUDED_