#ifndef BOOST_ATOMIC_DETAIL_GCC_PPC_HPP
#define BOOST_ATOMIC_DETAIL_GCC_PPC_HPP

// Copyright (c) 2009 Helge Bahmann
// Copyright (c) 2013 Tim Blechmann
//
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)

#include <string.h>
#include <cstddef>
#include <boost/cstdint.hpp>
#include <boost/atomic/detail/config.hpp>

#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif

/*
    Refer to Motorola, "Programming Environments Manual for 32-Bit
    Implementations of the PowerPC Architecture", Appendix E:
    "Synchronization Programming Examples", for an explanation of what is
    going on here (it can be found on the web in various places under the
    name "MPCFPE32B.pdf").

    Most of the atomic operations map to instructions in a relatively
    straightforward fashion, but "load"s may at first glance appear
    a bit strange, as they map to:

        lwz %rX, addr
        cmpw %rX, %rX
        bne- 1f
    1:

    That is, the CPU is forced to perform a branch that "formally" depends
    on the value retrieved from memory. This scheme has an overhead of
    about 1-2 clock cycles per load, but it allows "acquire" to map to the
    "isync" instruction instead of "sync", uniformly and for all types of
    atomic operations. Since "isync" costs about 15 clock cycles while
    "sync" costs about 50 clock cycles, the small penalty added to atomic
    loads is more than compensated for.

    Byte- and halfword-sized atomic values are realized by encoding the
    value to be represented into a word, performing sign/zero extension
    as appropriate. This means that after add/sub operations the value
    needs fixing up to accurately preserve the wrap-around semantics of
    the smaller type. (Nothing special needs to be done for the bit-wise
    and the "exchange type" operators, as the compiler already sees to
    it that values carried in registers are extended appropriately and
    everything falls into place naturally.)

    The register constraint "b" instructs gcc to use any register
    except r0; this is sometimes required because the encoding for
    r0 is used to signify "constant zero" in a number of instructions,
    making r0 unusable there. For simplicity this constraint is used
    everywhere, rather than looking it up on a per-instruction basis,
    and ppc has enough registers for this not to pose a problem.
*/
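
/*
    For illustration (not part of the original comment): combining the fences
    defined below with the load sequence above, an acquire load of a 32-bit
    value expands to roughly the following instruction sequence:

        lwz  %rX, addr   # the load itself
        cmpw %rX, %rX    # compare the loaded value against itself...
        bne- 1f          # ...and branch on the result: a formal dependency
    1:
        isync            # ppc_fence_after(memory_order_acquire)
*/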
namespace boost {
namespace atomics {
namespace detail {

inline void
ppc_fence_before(memory_order order)
{
    switch(order)
    {
    case memory_order_release:
    case memory_order_acq_rel:
#if defined(__powerpc64__)
        __asm__ __volatile__ ("lwsync" ::: "memory");
        break;
#endif
    case memory_order_seq_cst:
        __asm__ __volatile__ ("sync" ::: "memory");
    default:;
    }
}

inline void
ppc_fence_after(memory_order order)
{
    switch(order)
    {
    case memory_order_acquire:
    case memory_order_acq_rel:
    case memory_order_seq_cst:
        __asm__ __volatile__ ("isync");
    case memory_order_consume:
        __asm__ __volatile__ ("" ::: "memory");
    default:;
    }
}

inline void
ppc_fence_after_store(memory_order order)
{
    switch(order)
    {
    case memory_order_seq_cst:
        __asm__ __volatile__ ("sync");
    default:;
    }
}
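
/*
    For illustration (not part of the implementation): a seq_cst store of a
    32-bit value combines the fences above around a plain "stw", i.e. roughly:

        sync            # ppc_fence_before(memory_order_seq_cst)
        stw  %rX, addr  # the store itself
        sync            # ppc_fence_after_store(memory_order_seq_cst)

    A release store needs only the leading fence ("lwsync" on ppc64, "sync"
    on 32-bit ppc) and no trailing one.
*/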
}
}

class atomic_flag
{
private:
    atomic_flag(const atomic_flag &) /* = delete */ ;
    atomic_flag & operator=(const atomic_flag &) /* = delete */ ;
    uint32_t v_;
public:
    BOOST_CONSTEXPR atomic_flag(void) BOOST_NOEXCEPT : v_(0) {}

    void
    clear(memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        atomics::detail::ppc_fence_before(order);
        const_cast<volatile uint32_t &>(v_) = 0;
        atomics::detail::ppc_fence_after_store(order);
    }

    bool
    test_and_set(memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        uint32_t original;
        atomics::detail::ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "lwarx %0,%y1\n"
            "stwcx. %2,%y1\n"
            "bne- 1b\n"
            : "=&b" (original), "+Z"(v_)
            : "b" (1)
            : "cr0"
        );
        atomics::detail::ppc_fence_after(order);
        return original;
    }
};
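
/*
    Usage sketch (illustrative only, not part of this header): atomic_flag as
    a minimal spinlock; the names lock_flag/lock/unlock are hypothetical.

        boost::atomic_flag lock_flag;   // constructed cleared

        void lock()
        {
            // acquire on success: isync orders later accesses after the loop
            while (lock_flag.test_and_set(boost::memory_order_acquire))
                ;   // spin until the previous value was 0
        }

        void unlock()
        {
            // release: lwsync/sync orders earlier accesses before the store
            lock_flag.clear(boost::memory_order_release);
        }
*/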
} /* namespace boost */

#define BOOST_ATOMIC_FLAG_LOCK_FREE 2

#include <boost/atomic/detail/base.hpp>

#if !defined(BOOST_ATOMIC_FORCE_FALLBACK)

#define BOOST_ATOMIC_CHAR_LOCK_FREE 2
#define BOOST_ATOMIC_CHAR16_T_LOCK_FREE 2
#define BOOST_ATOMIC_CHAR32_T_LOCK_FREE 2
#define BOOST_ATOMIC_WCHAR_T_LOCK_FREE 2
#define BOOST_ATOMIC_SHORT_LOCK_FREE 2
#define BOOST_ATOMIC_INT_LOCK_FREE 2
#define BOOST_ATOMIC_LONG_LOCK_FREE 2
#define BOOST_ATOMIC_POINTER_LOCK_FREE 2
#if defined(__powerpc64__)
#define BOOST_ATOMIC_LLONG_LOCK_FREE 2
#else
#define BOOST_ATOMIC_LLONG_LOCK_FREE 0
#endif
#define BOOST_ATOMIC_BOOL_LOCK_FREE 2

/* We would like to move the slow path of a failed compare_exchange
(the code that clears the "success" bit) out of line. gcc can in
principle do that using ".subsection"/".previous", but Apple's
binutils seemingly does not understand that. Therefore wrap the
"clear" of the flag in a macro and let it remain in-line for Apple.
*/
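
/*
    For illustration (not part of the implementation): with the non-Apple
    variant below, the body of a compare_exchange expands to roughly

        lwarx  %0,%y2
        cmpw   %0, %3
        bne-   2f          # mismatch: jump to the out-of-line failure path
        stwcx. %4,%y2
        bne-   2f          # lost reservation (weak variant): also a failure
        addi   %1,0,1      # success = 1
    1:
    9:                     # join point
        .subsection 2      # cold code, placed out of line by the assembler
    2:  addi   %1,0,0      # failure: success = 0
        b      9b
        .previous
*/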
#if !defined(__APPLE__)

#define BOOST_ATOMIC_ASM_SLOWPATH_CLEAR \
    "9:\n" \
    ".subsection 2\n" \
    "2: addi %1,0,0\n" \
    "b 9b\n" \
    ".previous\n"

#else

#define BOOST_ATOMIC_ASM_SLOWPATH_CLEAR \
    "b 9f\n" \
    "2: addi %1,0,0\n" \
    "9:\n"

#endif

namespace boost {
namespace atomics {
namespace detail {

/* integral types */

template<typename T>
class base_atomic<T, int, 1, true>
{
private:
    typedef base_atomic this_type;
    typedef T value_type;
    typedef int32_t storage_type;
    typedef T difference_type;
protected:
    typedef value_type value_arg_type;
public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
    BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}

    void
    store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        ppc_fence_before(order);
        __asm__ (
            "stw %1, %0\n"
            : "+m"(v_)
            : "r" (v)
        );
        ppc_fence_after_store(order);
    }

    value_type
    load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
    {
        value_type v;
        __asm__ __volatile__ (
            "lwz %0, %1\n"
            "cmpw %0, %0\n"
            "bne- 1f\n"
            "1:\n"
            : "=&r" (v)
            : "m" (v_)
        );
        ppc_fence_after(order);
        return v;
    }

    value_type
    exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "lwarx %0,%y1\n"
            "stwcx. %2,%y1\n"
            "bne- 1b\n"
            : "=&b" (original), "+Z"(v_)
            : "b" (v)
            : "cr0"
        );
        ppc_fence_after(order);
        return original;
    }
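
    /*
        For illustration (not part of the implementation): "lwarx" loads the
        word and places a reservation on its address; "stwcx." performs the
        store only if the reservation still holds, recording the outcome in
        cr0. "bne- 1b" retries the loop when the reservation was lost, which
        is what makes this read-modify-write sequence atomic.
    */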
    bool
    compare_exchange_weak(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        int success;
        ppc_fence_before(success_order);
        __asm__(
            "lwarx %0,%y2\n"
            "cmpw %0, %3\n"
            "bne- 2f\n"
            "stwcx. %4,%y2\n"
            "bne- 2f\n"
            "addi %1,0,1\n"
            "1:"
            BOOST_ATOMIC_ASM_SLOWPATH_CLEAR
            : "=&b" (expected), "=&b" (success), "+Z"(v_)
            : "b" (expected), "b" (desired)
            : "cr0"
        );
        if (success)
            ppc_fence_after(success_order);
        else
            ppc_fence_after(failure_order);
        return success;
    }

    bool
    compare_exchange_strong(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        int success;
        ppc_fence_before(success_order);
        __asm__(
            "0: lwarx %0,%y2\n"
            "cmpw %0, %3\n"
            "bne- 2f\n"
            "stwcx. %4,%y2\n"
            "bne- 0b\n"
            "addi %1,0,1\n"
            "1:"
            BOOST_ATOMIC_ASM_SLOWPATH_CLEAR
            : "=&b" (expected), "=&b" (success), "+Z"(v_)
            : "b" (expected), "b" (desired)
            : "cr0"
        );
        if (success)
            ppc_fence_after(success_order);
        else
            ppc_fence_after(failure_order);
        return success;
    }

    value_type
    fetch_add(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original, tmp;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "lwarx %0,%y2\n"
            "add %1,%0,%3\n"
            "extsb %1, %1\n"
            "stwcx. %1,%y2\n"
            "bne- 1b\n"
            : "=&b" (original), "=&b" (tmp), "+Z"(v_)
            : "b" (v)
            : "cc");
        ppc_fence_after(order);
        return original;
    }
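
    /*
        Example (illustrative only): why the "extsb" above is needed. For a
        signed 8-bit type, fetch_add(1) on the value 127 must wrap to -128.
        The "add" produces 128 in the low byte of the word; "extsb" re-sign-
        extends that byte, so the stored word becomes 0xFFFFFF80 (-128) and
        always holds a valid sign-extended representation of the 8-bit value,
        as the comment at the top of this file describes.
    */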
    value_type
    fetch_sub(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original, tmp;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "lwarx %0,%y2\n"
            "sub %1,%0,%3\n"
            "extsb %1, %1\n"
            "stwcx. %1,%y2\n"
            "bne- 1b\n"
            : "=&b" (original), "=&b" (tmp), "+Z"(v_)
            : "b" (v)
            : "cc");
        ppc_fence_after(order);
        return original;
    }

    value_type
    fetch_and(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original, tmp;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "lwarx %0,%y2\n"
            "and %1,%0,%3\n"
            "stwcx. %1,%y2\n"
            "bne- 1b\n"
            : "=&b" (original), "=&b" (tmp), "+Z"(v_)
            : "b" (v)
            : "cc");
        ppc_fence_after(order);
        return original;
    }

    value_type
    fetch_or(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original, tmp;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "lwarx %0,%y2\n"
            "or %1,%0,%3\n"
            "stwcx. %1,%y2\n"
            "bne- 1b\n"
            : "=&b" (original), "=&b" (tmp), "+Z"(v_)
            : "b" (v)
            : "cc");
        ppc_fence_after(order);
        return original;
    }

    value_type
    fetch_xor(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original, tmp;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "lwarx %0,%y2\n"
            "xor %1,%0,%3\n"
            "stwcx. %1,%y2\n"
            "bne- 1b\n"
            : "=&b" (original), "=&b" (tmp), "+Z"(v_)
            : "b" (v)
            : "cc");
        ppc_fence_after(order);
        return original;
    }

    bool
    is_lock_free(void) const volatile BOOST_NOEXCEPT
    {
        return true;
    }

    BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS

    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))

private:
    storage_type v_;
};
template<typename T>
class base_atomic<T, int, 1, false>
{
private:
    typedef base_atomic this_type;
    typedef T value_type;
    typedef uint32_t storage_type;
    typedef T difference_type;
protected:
    typedef value_type value_arg_type;
public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
    BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}

    void
    store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        ppc_fence_before(order);
        __asm__ (
            "stw %1, %0\n"
            : "+m"(v_)
            : "r" (v)
        );
        ppc_fence_after_store(order);
    }

    value_type
    load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
    {
        value_type v;
        __asm__ __volatile__ (
            "lwz %0, %1\n"
            "cmpw %0, %0\n"
            "bne- 1f\n"
            "1:\n"
            : "=&r" (v)
            : "m" (v_)
        );
        ppc_fence_after(order);
        return v;
    }

    value_type
    exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "lwarx %0,%y1\n"
            "stwcx. %2,%y1\n"
            "bne- 1b\n"
            : "=&b" (original), "+Z"(v_)
            : "b" (v)
            : "cr0"
        );
        ppc_fence_after(order);
        return original;
    }

    bool
    compare_exchange_weak(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        int success;
        ppc_fence_before(success_order);
        __asm__(
            "lwarx %0,%y2\n"
            "cmpw %0, %3\n"
            "bne- 2f\n"
            "stwcx. %4,%y2\n"
            "bne- 2f\n"
            "addi %1,0,1\n"
            "1:"
            BOOST_ATOMIC_ASM_SLOWPATH_CLEAR
            : "=&b" (expected), "=&b" (success), "+Z"(v_)
            : "b" (expected), "b" (desired)
            : "cr0"
        );
        if (success)
            ppc_fence_after(success_order);
        else
            ppc_fence_after(failure_order);
        return success;
    }

    bool
    compare_exchange_strong(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        int success;
        ppc_fence_before(success_order);
        __asm__(
            "0: lwarx %0,%y2\n"
            "cmpw %0, %3\n"
            "bne- 2f\n"
            "stwcx. %4,%y2\n"
            "bne- 0b\n"
            "addi %1,0,1\n"
            "1:"
            BOOST_ATOMIC_ASM_SLOWPATH_CLEAR
            : "=&b" (expected), "=&b" (success), "+Z"(v_)
            : "b" (expected), "b" (desired)
            : "cr0"
        );
        if (success)
            ppc_fence_after(success_order);
        else
            ppc_fence_after(failure_order);
        return success;
    }

    value_type
    fetch_add(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original, tmp;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "lwarx %0,%y2\n"
            "add %1,%0,%3\n"
            "rlwinm %1, %1, 0, 0xff\n"
            "stwcx. %1,%y2\n"
            "bne- 1b\n"
            : "=&b" (original), "=&b" (tmp), "+Z"(v_)
            : "b" (v)
            : "cc");
        ppc_fence_after(order);
        return original;
    }
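
    /*
        Example (illustrative only): the unsigned counterpart of the fixup.
        "rlwinm %1, %1, 0, 0xff" rotates by 0 and ANDs with the mask 0xff,
        keeping only the low byte. Thus fetch_add(1) on the value 255 stores
        0, preserving the modulo-256 wrap-around of the 8-bit type.
    */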
    value_type
    fetch_sub(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original, tmp;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "lwarx %0,%y2\n"
            "sub %1,%0,%3\n"
            "rlwinm %1, %1, 0, 0xff\n"
            "stwcx. %1,%y2\n"
            "bne- 1b\n"
            : "=&b" (original), "=&b" (tmp), "+Z"(v_)
            : "b" (v)
            : "cc");
        ppc_fence_after(order);
        return original;
    }

    value_type
    fetch_and(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original, tmp;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "lwarx %0,%y2\n"
            "and %1,%0,%3\n"
            "stwcx. %1,%y2\n"
            "bne- 1b\n"
            : "=&b" (original), "=&b" (tmp), "+Z"(v_)
            : "b" (v)
            : "cc");
        ppc_fence_after(order);
        return original;
    }

    value_type
    fetch_or(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original, tmp;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "lwarx %0,%y2\n"
            "or %1,%0,%3\n"
            "stwcx. %1,%y2\n"
            "bne- 1b\n"
            : "=&b" (original), "=&b" (tmp), "+Z"(v_)
            : "b" (v)
            : "cc");
        ppc_fence_after(order);
        return original;
    }

    value_type
    fetch_xor(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original, tmp;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "lwarx %0,%y2\n"
            "xor %1,%0,%3\n"
            "stwcx. %1,%y2\n"
            "bne- 1b\n"
            : "=&b" (original), "=&b" (tmp), "+Z"(v_)
            : "b" (v)
            : "cc");
        ppc_fence_after(order);
        return original;
    }

    bool
    is_lock_free(void) const volatile BOOST_NOEXCEPT
    {
        return true;
    }

    BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS

    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))

private:
    storage_type v_;
};
template<typename T>
class base_atomic<T, int, 2, true>
{
private:
    typedef base_atomic this_type;
    typedef T value_type;
    typedef int32_t storage_type;
    typedef T difference_type;
protected:
    typedef value_type value_arg_type;
public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
    BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}

    void
    store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        ppc_fence_before(order);
        __asm__ (
            "stw %1, %0\n"
            : "+m"(v_)
            : "r" (v)
        );
        ppc_fence_after_store(order);
    }

    value_type
    load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
    {
        value_type v;
        __asm__ __volatile__ (
            "lwz %0, %1\n"
            "cmpw %0, %0\n"
            "bne- 1f\n"
            "1:\n"
            : "=&r" (v)
            : "m" (v_)
        );
        ppc_fence_after(order);
        return v;
    }

    value_type
    exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "lwarx %0,%y1\n"
            "stwcx. %2,%y1\n"
            "bne- 1b\n"
            : "=&b" (original), "+Z"(v_)
            : "b" (v)
            : "cr0"
        );
        ppc_fence_after(order);
        return original;
    }

    bool
    compare_exchange_weak(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        int success;
        ppc_fence_before(success_order);
        __asm__(
            "lwarx %0,%y2\n"
            "cmpw %0, %3\n"
            "bne- 2f\n"
            "stwcx. %4,%y2\n"
            "bne- 2f\n"
            "addi %1,0,1\n"
            "1:"
            BOOST_ATOMIC_ASM_SLOWPATH_CLEAR
            : "=&b" (expected), "=&b" (success), "+Z"(v_)
            : "b" (expected), "b" (desired)
            : "cr0"
        );
        if (success)
            ppc_fence_after(success_order);
        else
            ppc_fence_after(failure_order);
        return success;
    }

    bool
    compare_exchange_strong(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        int success;
        ppc_fence_before(success_order);
        __asm__(
            "0: lwarx %0,%y2\n"
            "cmpw %0, %3\n"
            "bne- 2f\n"
            "stwcx. %4,%y2\n"
            "bne- 0b\n"
            "addi %1,0,1\n"
            "1:"
            BOOST_ATOMIC_ASM_SLOWPATH_CLEAR
            : "=&b" (expected), "=&b" (success), "+Z"(v_)
            : "b" (expected), "b" (desired)
            : "cr0"
        );
        if (success)
            ppc_fence_after(success_order);
        else
            ppc_fence_after(failure_order);
        return success;
    }

    value_type
    fetch_add(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original, tmp;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "lwarx %0,%y2\n"
            "add %1,%0,%3\n"
            "extsh %1, %1\n"
            "stwcx. %1,%y2\n"
            "bne- 1b\n"
            : "=&b" (original), "=&b" (tmp), "+Z"(v_)
            : "b" (v)
            : "cc");
        ppc_fence_after(order);
        return original;
    }

    value_type
    fetch_sub(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original, tmp;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "lwarx %0,%y2\n"
            "sub %1,%0,%3\n"
            "extsh %1, %1\n"
            "stwcx. %1,%y2\n"
            "bne- 1b\n"
            : "=&b" (original), "=&b" (tmp), "+Z"(v_)
            : "b" (v)
            : "cc");
        ppc_fence_after(order);
        return original;
    }

    value_type
    fetch_and(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original, tmp;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "lwarx %0,%y2\n"
            "and %1,%0,%3\n"
            "stwcx. %1,%y2\n"
            "bne- 1b\n"
            : "=&b" (original), "=&b" (tmp), "+Z"(v_)
            : "b" (v)
            : "cc");
        ppc_fence_after(order);
        return original;
    }

    value_type
    fetch_or(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original, tmp;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "lwarx %0,%y2\n"
            "or %1,%0,%3\n"
            "stwcx. %1,%y2\n"
            "bne- 1b\n"
            : "=&b" (original), "=&b" (tmp), "+Z"(v_)
            : "b" (v)
            : "cc");
        ppc_fence_after(order);
        return original;
    }

    value_type
    fetch_xor(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original, tmp;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "lwarx %0,%y2\n"
            "xor %1,%0,%3\n"
            "stwcx. %1,%y2\n"
            "bne- 1b\n"
            : "=&b" (original), "=&b" (tmp), "+Z"(v_)
            : "b" (v)
            : "cc");
        ppc_fence_after(order);
        return original;
    }

    bool
    is_lock_free(void) const volatile BOOST_NOEXCEPT
    {
        return true;
    }

    BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS

    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))

private:
    storage_type v_;
};
template<typename T>
class base_atomic<T, int, 2, false>
{
private:
    typedef base_atomic this_type;
    typedef T value_type;
    typedef uint32_t storage_type;
    typedef T difference_type;
protected:
    typedef value_type value_arg_type;
public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
    BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}

    void
    store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        ppc_fence_before(order);
        __asm__ (
            "stw %1, %0\n"
            : "+m"(v_)
            : "r" (v)
        );
        ppc_fence_after_store(order);
    }

    value_type
    load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
    {
        value_type v;
        __asm__ __volatile__ (
            "lwz %0, %1\n"
            "cmpw %0, %0\n"
            "bne- 1f\n"
            "1:\n"
            : "=&r" (v)
            : "m" (v_)
        );
        ppc_fence_after(order);
        return v;
    }

    value_type
    exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "lwarx %0,%y1\n"
            "stwcx. %2,%y1\n"
            "bne- 1b\n"
            : "=&b" (original), "+Z"(v_)
            : "b" (v)
            : "cr0"
        );
        ppc_fence_after(order);
        return original;
    }

    bool
    compare_exchange_weak(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        int success;
        ppc_fence_before(success_order);
        __asm__(
            "lwarx %0,%y2\n"
            "cmpw %0, %3\n"
            "bne- 2f\n"
            "stwcx. %4,%y2\n"
            "bne- 2f\n"
            "addi %1,0,1\n"
            "1:"
            BOOST_ATOMIC_ASM_SLOWPATH_CLEAR
            : "=&b" (expected), "=&b" (success), "+Z"(v_)
            : "b" (expected), "b" (desired)
            : "cr0"
        );
        if (success)
            ppc_fence_after(success_order);
        else
            ppc_fence_after(failure_order);
        return success;
    }

    bool
    compare_exchange_strong(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        int success;
        ppc_fence_before(success_order);
        __asm__(
            "0: lwarx %0,%y2\n"
            "cmpw %0, %3\n"
            "bne- 2f\n"
            "stwcx. %4,%y2\n"
            "bne- 0b\n"
            "addi %1,0,1\n"
            "1:"
            BOOST_ATOMIC_ASM_SLOWPATH_CLEAR
            : "=&b" (expected), "=&b" (success), "+Z"(v_)
            : "b" (expected), "b" (desired)
            : "cr0"
        );
        if (success)
            ppc_fence_after(success_order);
        else
            ppc_fence_after(failure_order);
        return success;
    }

    value_type
    fetch_add(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original, tmp;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "lwarx %0,%y2\n"
            "add %1,%0,%3\n"
            "rlwinm %1, %1, 0, 0xffff\n"
            "stwcx. %1,%y2\n"
            "bne- 1b\n"
            : "=&b" (original), "=&b" (tmp), "+Z"(v_)
            : "b" (v)
            : "cc");
        ppc_fence_after(order);
        return original;
    }

    value_type
    fetch_sub(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original, tmp;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "lwarx %0,%y2\n"
            "sub %1,%0,%3\n"
            "rlwinm %1, %1, 0, 0xffff\n"
            "stwcx. %1,%y2\n"
            "bne- 1b\n"
            : "=&b" (original), "=&b" (tmp), "+Z"(v_)
            : "b" (v)
            : "cc");
        ppc_fence_after(order);
        return original;
    }

    value_type
    fetch_and(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original, tmp;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "lwarx %0,%y2\n"
            "and %1,%0,%3\n"
            "stwcx. %1,%y2\n"
            "bne- 1b\n"
            : "=&b" (original), "=&b" (tmp), "+Z"(v_)
            : "b" (v)
            : "cc");
        ppc_fence_after(order);
        return original;
    }

    value_type
    fetch_or(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original, tmp;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "lwarx %0,%y2\n"
            "or %1,%0,%3\n"
            "stwcx. %1,%y2\n"
            "bne- 1b\n"
            : "=&b" (original), "=&b" (tmp), "+Z"(v_)
            : "b" (v)
            : "cc");
        ppc_fence_after(order);
        return original;
    }

    value_type
    fetch_xor(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original, tmp;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "lwarx %0,%y2\n"
            "xor %1,%0,%3\n"
            "stwcx. %1,%y2\n"
            "bne- 1b\n"
            : "=&b" (original), "=&b" (tmp), "+Z"(v_)
            : "b" (v)
            : "cc");
        ppc_fence_after(order);
        return original;
    }

    bool
    is_lock_free(void) const volatile BOOST_NOEXCEPT
    {
        return true;
    }

    BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS

    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))

private:
    storage_type v_;
};
template<typename T, bool Sign>
class base_atomic<T, int, 4, Sign>
{
private:
    typedef base_atomic this_type;
    typedef T value_type;
    typedef T difference_type;
protected:
    typedef value_type value_arg_type;
public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
    BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}

    void
    store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        ppc_fence_before(order);
        const_cast<volatile value_type &>(v_) = v;
        ppc_fence_after_store(order);
    }

    value_type
    load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
    {
        value_type v = const_cast<const volatile value_type &>(v_);
        __asm__ __volatile__ (
            "cmpw %0, %0\n"
            "bne- 1f\n"
            "1:\n"
            : "+b"(v)
            :
            : "cr0"
        );
        ppc_fence_after(order);
        return v;
    }

    value_type
    exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "lwarx %0,%y1\n"
            "stwcx. %2,%y1\n"
            "bne- 1b\n"
            : "=&b" (original), "+Z"(v_)
            : "b" (v)
            : "cr0"
        );
        ppc_fence_after(order);
        return original;
    }

    bool
    compare_exchange_weak(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        int success;
        ppc_fence_before(success_order);
        __asm__(
            "lwarx %0,%y2\n"
            "cmpw %0, %3\n"
            "bne- 2f\n"
            "stwcx. %4,%y2\n"
            "bne- 2f\n"
            "addi %1,0,1\n"
            "1:"
            BOOST_ATOMIC_ASM_SLOWPATH_CLEAR
            : "=&b" (expected), "=&b" (success), "+Z"(v_)
            : "b" (expected), "b" (desired)
            : "cr0"
        );
        if (success)
            ppc_fence_after(success_order);
        else
            ppc_fence_after(failure_order);
        return success;
    }

    bool
    compare_exchange_strong(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        int success;
        ppc_fence_before(success_order);
        __asm__(
            "0: lwarx %0,%y2\n"
            "cmpw %0, %3\n"
            "bne- 2f\n"
            "stwcx. %4,%y2\n"
            "bne- 0b\n"
            "addi %1,0,1\n"
            "1:"
            BOOST_ATOMIC_ASM_SLOWPATH_CLEAR
            : "=&b" (expected), "=&b" (success), "+Z"(v_)
            : "b" (expected), "b" (desired)
            : "cr0"
        );
        if (success)
            ppc_fence_after(success_order);
        else
            ppc_fence_after(failure_order);
        return success;
    }

    value_type
    fetch_add(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original, tmp;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "lwarx %0,%y2\n"
            "add %1,%0,%3\n"
            "stwcx. %1,%y2\n"
            "bne- 1b\n"
            : "=&b" (original), "=&b" (tmp), "+Z"(v_)
            : "b" (v)
            : "cc");
        ppc_fence_after(order);
        return original;
    }

    value_type
    fetch_sub(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original, tmp;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "lwarx %0,%y2\n"
            "sub %1,%0,%3\n"
            "stwcx. %1,%y2\n"
            "bne- 1b\n"
            : "=&b" (original), "=&b" (tmp), "+Z"(v_)
            : "b" (v)
            : "cc");
        ppc_fence_after(order);
        return original;
    }

    value_type
    fetch_and(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original, tmp;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "lwarx %0,%y2\n"
            "and %1,%0,%3\n"
            "stwcx. %1,%y2\n"
            "bne- 1b\n"
            : "=&b" (original), "=&b" (tmp), "+Z"(v_)
            : "b" (v)
            : "cc");
        ppc_fence_after(order);
        return original;
    }

    value_type
    fetch_or(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original, tmp;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "lwarx %0,%y2\n"
            "or %1,%0,%3\n"
            "stwcx. %1,%y2\n"
            "bne- 1b\n"
            : "=&b" (original), "=&b" (tmp), "+Z"(v_)
            : "b" (v)
            : "cc");
        ppc_fence_after(order);
        return original;
    }

    value_type
    fetch_xor(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original, tmp;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "lwarx %0,%y2\n"
            "xor %1,%0,%3\n"
            "stwcx. %1,%y2\n"
            "bne- 1b\n"
            : "=&b" (original), "=&b" (tmp), "+Z"(v_)
            : "b" (v)
            : "cc");
        ppc_fence_after(order);
        return original;
    }

    bool
    is_lock_free(void) const volatile BOOST_NOEXCEPT
    {
        return true;
    }

    BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS

    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))

private:
    value_type v_;
};
#if defined(__powerpc64__)

template<typename T, bool Sign>
class base_atomic<T, int, 8, Sign>
{
private:
    typedef base_atomic this_type;
    typedef T value_type;
    typedef T difference_type;
protected:
    typedef value_type value_arg_type;
public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
    BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}

    void
    store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        ppc_fence_before(order);
        const_cast<volatile value_type &>(v_) = v;
        ppc_fence_after_store(order);
    }

    value_type
    load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
    {
        value_type v = const_cast<const volatile value_type &>(v_);
        __asm__ __volatile__ (
            "cmpd %0, %0\n"
            "bne- 1f\n"
            "1:\n"
            : "+b"(v)
            :
            : "cr0"
        );
        ppc_fence_after(order);
        return v;
    }

    value_type
    exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "ldarx %0,%y1\n"
            "stdcx. %2,%y1\n"
            "bne- 1b\n"
            : "=&b" (original), "+Z"(v_)
            : "b" (v)
            : "cr0"
        );
        ppc_fence_after(order);
        return original;
    }

    bool
    compare_exchange_weak(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        int success;
        ppc_fence_before(success_order);
        __asm__(
            "ldarx %0,%y2\n"
            "cmpd %0, %3\n"
            "bne- 2f\n"
            "stdcx. %4,%y2\n"
            "bne- 2f\n"
            "addi %1,0,1\n"
            "1:"
            BOOST_ATOMIC_ASM_SLOWPATH_CLEAR
            : "=&b" (expected), "=&b" (success), "+Z"(v_)
            : "b" (expected), "b" (desired)
            : "cr0"
        );
        if (success)
            ppc_fence_after(success_order);
        else
            ppc_fence_after(failure_order);
        return success;
    }

    bool
    compare_exchange_strong(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        int success;
        ppc_fence_before(success_order);
        __asm__(
            "0: ldarx %0,%y2\n"
            "cmpd %0, %3\n"
            "bne- 2f\n"
            "stdcx. %4,%y2\n"
            "bne- 0b\n"
            "addi %1,0,1\n"
            "1:"
            BOOST_ATOMIC_ASM_SLOWPATH_CLEAR
            : "=&b" (expected), "=&b" (success), "+Z"(v_)
            : "b" (expected), "b" (desired)
            : "cr0"
        );
        if (success)
            ppc_fence_after(success_order);
        else
            ppc_fence_after(failure_order);
        return success;
    }

    value_type
    fetch_add(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original, tmp;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "ldarx %0,%y2\n"
            "add %1,%0,%3\n"
            "stdcx. %1,%y2\n"
            "bne- 1b\n"
            : "=&b" (original), "=&b" (tmp), "+Z"(v_)
            : "b" (v)
            : "cc");
        ppc_fence_after(order);
        return original;
    }

    value_type
    fetch_sub(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original, tmp;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "ldarx %0,%y2\n"
            "sub %1,%0,%3\n"
            "stdcx. %1,%y2\n"
            "bne- 1b\n"
            : "=&b" (original), "=&b" (tmp), "+Z"(v_)
            : "b" (v)
            : "cc");
        ppc_fence_after(order);
        return original;
    }

    value_type
    fetch_and(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original, tmp;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "ldarx %0,%y2\n"
            "and %1,%0,%3\n"
            "stdcx. %1,%y2\n"
            "bne- 1b\n"
            : "=&b" (original), "=&b" (tmp), "+Z"(v_)
            : "b" (v)
            : "cc");
        ppc_fence_after(order);
        return original;
    }

    value_type
    fetch_or(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original, tmp;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "ldarx %0,%y2\n"
            "or %1,%0,%3\n"
            "stdcx. %1,%y2\n"
            "bne- 1b\n"
            : "=&b" (original), "=&b" (tmp), "+Z"(v_)
            : "b" (v)
            : "cc");
        ppc_fence_after(order);
        return original;
    }

    value_type
    fetch_xor(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original, tmp;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "ldarx %0,%y2\n"
            "xor %1,%0,%3\n"
            "stdcx. %1,%y2\n"
            "bne- 1b\n"
            : "=&b" (original), "=&b" (tmp), "+Z"(v_)
            : "b" (v)
            : "cc");
        ppc_fence_after(order);
        return original;
    }

    bool
    is_lock_free(void) const volatile BOOST_NOEXCEPT
    {
        return true;
    }

    BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS

    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))

private:
    value_type v_;
};
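
/*
    Note (illustrative only): the 8-byte specialization above is identical in
    structure to the 4-byte one, substituting the doubleword instructions
    ldarx/stdcx./cmpd for lwarx/stwcx./cmpw. It is only compiled on
    __powerpc64__, matching BOOST_ATOMIC_LLONG_LOCK_FREE above.
*/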
#endif

/* pointer types */

#if !defined(__powerpc64__)

template<bool Sign>
class base_atomic<void *, void *, 4, Sign>
{
private:
    typedef base_atomic this_type;
    typedef std::ptrdiff_t difference_type;
    typedef void * value_type;
protected:
    typedef value_type value_arg_type;
public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
    BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}

    void
    store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        ppc_fence_before(order);
        __asm__ (
            "stw %1, %0\n"
            : "+m" (v_)
            : "r" (v)
        );
        ppc_fence_after_store(order);
    }

    value_type
    load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
    {
        value_type v;
        __asm__ (
            "lwz %0, %1\n"
            "cmpw %0, %0\n"
            "bne- 1f\n"
            "1:\n"
            : "=r"(v)
            : "m"(v_)
            : "cr0"
        );
        ppc_fence_after(order);
        return v;
    }

    value_type
    exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "lwarx %0,%y1\n"
            "stwcx. %2,%y1\n"
            "bne- 1b\n"
            : "=&b" (original), "+Z"(v_)
            : "b" (v)
            : "cr0"
        );
        ppc_fence_after(order);
        return original;
    }

    bool
    compare_exchange_weak(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        int success;
        ppc_fence_before(success_order);
        __asm__(
            "lwarx %0,%y2\n"
            "cmpw %0, %3\n"
            "bne- 2f\n"
            "stwcx. %4,%y2\n"
            "bne- 2f\n"
            "addi %1,0,1\n"
            "1:"
            BOOST_ATOMIC_ASM_SLOWPATH_CLEAR
            : "=&b" (expected), "=&b" (success), "+Z"(v_)
            : "b" (expected), "b" (desired)
            : "cr0"
        );
        if (success)
            ppc_fence_after(success_order);
        else
            ppc_fence_after(failure_order);
        return success;
    }

    bool
    compare_exchange_strong(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        int success;
        ppc_fence_before(success_order);
        __asm__(
            "0: lwarx %0,%y2\n"
            "cmpw %0, %3\n"
            "bne- 2f\n"
            "stwcx. %4,%y2\n"
            "bne- 0b\n"
            "addi %1,0,1\n"
            "1:"
            BOOST_ATOMIC_ASM_SLOWPATH_CLEAR
            : "=&b" (expected), "=&b" (success), "+Z"(v_)
            : "b" (expected), "b" (desired)
            : "cr0"
        );
        if (success)
            ppc_fence_after(success_order);
        else
            ppc_fence_after(failure_order);
        return success;
    }

    bool
    is_lock_free(void) const volatile BOOST_NOEXCEPT
    {
        return true;
    }

    value_type
    fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original, tmp;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "lwarx %0,%y2\n"
            "add %1,%0,%3\n"
            "stwcx. %1,%y2\n"
            "bne- 1b\n"
            : "=&b" (original), "=&b" (tmp), "+Z"(v_)
            : "b" (v)
            : "cc");
        ppc_fence_after(order);
        return original;
    }

    value_type
    fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original, tmp;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "lwarx %0,%y2\n"
            "sub %1,%0,%3\n"
            "stwcx. %1,%y2\n"
            "bne- 1b\n"
            : "=&b" (original), "=&b" (tmp), "+Z"(v_)
            : "b" (v)
            : "cc");
        ppc_fence_after(order);
        return original;
    }

    BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS

    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))

private:
    value_type v_;
};
  1579. template<typename T, bool Sign>
  1580. class base_atomic<T *, void *, 4, Sign>
  1581. {
  1582. private:
  1583. typedef base_atomic this_type;
  1584. typedef T * value_type;
  1585. typedef std::ptrdiff_t difference_type;
  1586. protected:
  1587. typedef value_type value_arg_type;
  1588. public:
  1589. BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
  1590. BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
  1591. void
  1592. store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
  1593. {
  1594. ppc_fence_before(order);
  1595. __asm__ (
  1596. "stw %1, %0\n"
  1597. : "+m" (v_)
  1598. : "r" (v)
  1599. );
  1600. ppc_fence_after_store(order);
  1601. }
  1602. value_type
  1603. load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
  1604. {
  1605. value_type v;
  1606. __asm__ (
  1607. "lwz %0, %1\n"
  1608. "cmpw %0, %0\n"
  1609. "bne- 1f\n"
  1610. "1:\n"
  1611. : "=r"(v)
  1612. : "m"(v_)
  1613. : "cr0"
  1614. );
  1615. ppc_fence_after(order);
  1616. return v;
  1617. }
    value_type
    exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "lwarx %0,%y1\n"
            "stwcx. %2,%y1\n"
            "bne- 1b\n"
            : "=&b" (original), "+Z"(v_)
            : "b" (v)
            : "cr0"
        );
        ppc_fence_after(order);
        return original;
    }
    bool
    compare_exchange_weak(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        int success;
        ppc_fence_before(success_order);
        __asm__(
            "lwarx %0,%y2\n"
            "cmpw %0, %3\n"
            "bne- 2f\n"
            "stwcx. %4,%y2\n"
            "bne- 2f\n"
            "addi %1,0,1\n"
            "1:"
            BOOST_ATOMIC_ASM_SLOWPATH_CLEAR
            : "=&b" (expected), "=&b" (success), "+Z"(v_)
            : "b" (expected), "b" (desired)
            : "cr0"
        );
        if (success)
            ppc_fence_after(success_order);
        else
            ppc_fence_after(failure_order);
        return success;
    }
    bool
    compare_exchange_strong(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        int success;
        ppc_fence_before(success_order);
        __asm__(
            "0: lwarx %0,%y2\n"
            "cmpw %0, %3\n"
            "bne- 2f\n"
            "stwcx. %4,%y2\n"
            "bne- 0b\n"
            "addi %1,0,1\n"
            "1:"
            BOOST_ATOMIC_ASM_SLOWPATH_CLEAR
            : "=&b" (expected), "=&b" (success), "+Z"(v_)
            : "b" (expected), "b" (desired)
            : "cr0"
        );
        if (success)
            ppc_fence_after(success_order);
        else
            ppc_fence_after(failure_order);
        return success;
    }
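    /*
        For T* the arithmetic members first scale the operand by sizeof(T),
        so fetch_add(v) advances the stored pointer by v elements, matching
        built-in pointer arithmetic. Illustrative only (not part of this
        header): for an atomic int*, fetch_add(1) moves the pointer forward
        by sizeof(int) bytes and returns the previous pointer value.
    */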
    value_type
    fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        v = v * sizeof(*v_);
        value_type original, tmp;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "lwarx %0,%y2\n"
            "add %1,%0,%3\n"
            "stwcx. %1,%y2\n"
            "bne- 1b\n"
            : "=&b" (original), "=&b" (tmp), "+Z"(v_)
            : "b" (v)
            : "cc");
        ppc_fence_after(order);
        return original;
    }
    value_type
    fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        v = v * sizeof(*v_);
        value_type original, tmp;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "lwarx %0,%y2\n"
            "sub %1,%0,%3\n"
            "stwcx. %1,%y2\n"
            "bne- 1b\n"
            : "=&b" (original), "=&b" (tmp), "+Z"(v_)
            : "b" (v)
            : "cc");
        ppc_fence_after(order);
        return original;
    }
    bool
    is_lock_free(void) const volatile BOOST_NOEXCEPT
    {
        return true;
    }
    BOOST_ATOMIC_DECLARE_POINTER_OPERATORS
    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
private:
    value_type v_;
};
#else
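/*
    On 64-bit PowerPC pointers are 8 bytes wide, so the specializations below
    mirror the 32-bit ones using the doubleword instructions: ld/std replace
    lwz/stw, ldarx/stdcx. replace lwarx/stwcx., and cmpd replaces cmpw.
*/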
template<bool Sign>
class base_atomic<void *, void *, 8, Sign>
{
private:
    typedef base_atomic this_type;
    typedef std::ptrdiff_t difference_type;
    typedef void * value_type;
protected:
    typedef value_type value_arg_type;
public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
    BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
    void
    store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        ppc_fence_before(order);
        __asm__ __volatile__ (
            "std %1, %0\n"
            : "+m" (v_)
            : "r" (v)
        );
        ppc_fence_after_store(order);
    }
    value_type
    load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
    {
        value_type v;
        __asm__ __volatile__ (
            "ld %0, %1\n"
            "cmpd %0, %0\n"
            "bne- 1f\n"
            "1:\n"
            : "=r"(v)
            : "m"(v_)
            : "cr0"
        );
        ppc_fence_after(order);
        return v;
    }
    value_type
    exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "ldarx %0,%y1\n"
            "stdcx. %2,%y1\n"
            "bne- 1b\n"
            : "=&b" (original), "+Z"(v_)
            : "b" (v)
            : "cr0"
        );
        ppc_fence_after(order);
        return original;
    }
    bool
    compare_exchange_weak(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        int success;
        ppc_fence_before(success_order);
        __asm__(
            "ldarx %0,%y2\n"
            "cmpd %0, %3\n"
            "bne- 2f\n"
            "stdcx. %4,%y2\n"
            "bne- 2f\n"
            "addi %1,0,1\n"
            "1:"
            BOOST_ATOMIC_ASM_SLOWPATH_CLEAR
            : "=&b" (expected), "=&b" (success), "+Z"(v_)
            : "b" (expected), "b" (desired)
            : "cr0"
        );
        if (success)
            ppc_fence_after(success_order);
        else
            ppc_fence_after(failure_order);
        return success;
    }
    bool
    compare_exchange_strong(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        int success;
        ppc_fence_before(success_order);
        __asm__(
            "0: ldarx %0,%y2\n"
            "cmpd %0, %3\n"
            "bne- 2f\n"
            "stdcx. %4,%y2\n"
            "bne- 0b\n"
            "addi %1,0,1\n"
            "1:"
            BOOST_ATOMIC_ASM_SLOWPATH_CLEAR
            : "=&b" (expected), "=&b" (success), "+Z"(v_)
            : "b" (expected), "b" (desired)
            : "cr0"
        );
        if (success)
            ppc_fence_after(success_order);
        else
            ppc_fence_after(failure_order);
        return success;
    }
    bool
    is_lock_free(void) const volatile BOOST_NOEXCEPT
    {
        return true;
    }
    value_type
    fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original, tmp;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "ldarx %0,%y2\n"
            "add %1,%0,%3\n"
            "stdcx. %1,%y2\n"
            "bne- 1b\n"
            : "=&b" (original), "=&b" (tmp), "+Z"(v_)
            : "b" (v)
            : "cc");
        ppc_fence_after(order);
        return original;
    }
    value_type
    fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original, tmp;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "ldarx %0,%y2\n"
            "sub %1,%0,%3\n"
            "stdcx. %1,%y2\n"
            "bne- 1b\n"
            : "=&b" (original), "=&b" (tmp), "+Z"(v_)
            : "b" (v)
            : "cc");
        ppc_fence_after(order);
        return original;
    }
    BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS
    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
private:
    value_type v_;
};
template<typename T, bool Sign>
class base_atomic<T *, void *, 8, Sign>
{
private:
    typedef base_atomic this_type;
    typedef T * value_type;
    typedef std::ptrdiff_t difference_type;
protected:
    typedef value_type value_arg_type;
public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
    BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
    void
    store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        ppc_fence_before(order);
        __asm__ __volatile__ (
            "std %1, %0\n"
            : "+m" (v_)
            : "r" (v)
        );
        ppc_fence_after_store(order);
    }
    value_type
    load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
    {
        value_type v;
        __asm__ __volatile__ (
            "ld %0, %1\n"
            "cmpd %0, %0\n"
            "bne- 1f\n"
            "1:\n"
            : "=r"(v)
            : "m"(v_)
            : "cr0"
        );
        ppc_fence_after(order);
        return v;
    }
    value_type
    exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type original;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "ldarx %0,%y1\n"
            "stdcx. %2,%y1\n"
            "bne- 1b\n"
            : "=&b" (original), "+Z"(v_)
            : "b" (v)
            : "cr0"
        );
        ppc_fence_after(order);
        return original;
    }
    bool
    compare_exchange_weak(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        int success;
        ppc_fence_before(success_order);
        __asm__(
            "ldarx %0,%y2\n"
            "cmpd %0, %3\n"
            "bne- 2f\n"
            "stdcx. %4,%y2\n"
            "bne- 2f\n"
            "addi %1,0,1\n"
            "1:"
            BOOST_ATOMIC_ASM_SLOWPATH_CLEAR
            : "=&b" (expected), "=&b" (success), "+Z"(v_)
            : "b" (expected), "b" (desired)
            : "cr0"
        );
        if (success)
            ppc_fence_after(success_order);
        else
            ppc_fence_after(failure_order);
        return success;
    }
    bool
    compare_exchange_strong(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        int success;
        ppc_fence_before(success_order);
        __asm__(
            "0: ldarx %0,%y2\n"
            "cmpd %0, %3\n"
            "bne- 2f\n"
            "stdcx. %4,%y2\n"
            "bne- 0b\n"
            "addi %1,0,1\n"
            "1:"
            BOOST_ATOMIC_ASM_SLOWPATH_CLEAR
            : "=&b" (expected), "=&b" (success), "+Z"(v_)
            : "b" (expected), "b" (desired)
            : "cr0"
        );
        if (success)
            ppc_fence_after(success_order);
        else
            ppc_fence_after(failure_order);
        return success;
    }
    value_type
    fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        v = v * sizeof(*v_);
        value_type original, tmp;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "ldarx %0,%y2\n"
            "add %1,%0,%3\n"
            "stdcx. %1,%y2\n"
            "bne- 1b\n"
            : "=&b" (original), "=&b" (tmp), "+Z"(v_)
            : "b" (v)
            : "cc");
        ppc_fence_after(order);
        return original;
    }
    value_type
    fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        v = v * sizeof(*v_);
        value_type original, tmp;
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "ldarx %0,%y2\n"
            "sub %1,%0,%3\n"
            "stdcx. %1,%y2\n"
            "bne- 1b\n"
            : "=&b" (original), "=&b" (tmp), "+Z"(v_)
            : "b" (v)
            : "cc");
        ppc_fence_after(order);
        return original;
    }
    bool
    is_lock_free(void) const volatile BOOST_NOEXCEPT
    {
        return true;
    }
    BOOST_ATOMIC_DECLARE_POINTER_OPERATORS
    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
private:
    value_type v_;
};
#endif
/* generic */
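/*
    The generic specializations below handle small trivially-copyable types
    that are neither integers nor pointers. The value is bit-copied with
    memcpy into an integral storage_type cell (a full 32-bit word even for
    1- and 2-byte types, since lwarx/stwcx. operate on words) and all atomic
    work is done on that cell. Zeroing the cell in the constructor, store(),
    exchange() and the compare_exchange members keeps the padding bits
    consistent, so the bitwise cmpw comparison is meaningful.
*/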
template<typename T, bool Sign>
class base_atomic<T, void, 1, Sign>
{
private:
    typedef base_atomic this_type;
    typedef T value_type;
    typedef uint32_t storage_type;
protected:
    typedef value_type const& value_arg_type;
public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
    explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : v_(0)
    {
        memcpy(&v_, &v, sizeof(value_type));
    }
    void
    store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        storage_type tmp = 0;
        memcpy(&tmp, &v, sizeof(value_type));
        ppc_fence_before(order);
        __asm__ __volatile__ (
            "stw %1, %0\n"
            : "+m" (v_)
            : "r" (tmp)
        );
        ppc_fence_after_store(order);
    }
    value_type
    load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
    {
        storage_type tmp;
        __asm__ __volatile__ (
            "lwz %0, %1\n"
            "cmpw %0, %0\n"
            "bne- 1f\n"
            "1:\n"
            : "=r"(tmp)
            : "m"(v_)
            : "cr0"
        );
        ppc_fence_after(order);
        value_type v;
        memcpy(&v, &tmp, sizeof(value_type));
        return v;
    }
    value_type
    exchange(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        storage_type tmp = 0, original;
        memcpy(&tmp, &v, sizeof(value_type));
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "lwarx %0,%y1\n"
            "stwcx. %2,%y1\n"
            "bne- 1b\n"
            : "=&b" (original), "+Z"(v_)
            : "b" (tmp)
            : "cr0"
        );
        ppc_fence_after(order);
        value_type res;
        memcpy(&res, &original, sizeof(value_type));
        return res;
    }
    bool
    compare_exchange_weak(
        value_type & expected,
        value_type const& desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        storage_type expected_s = 0, desired_s = 0;
        memcpy(&expected_s, &expected, sizeof(value_type));
        memcpy(&desired_s, &desired, sizeof(value_type));
        int success;
        ppc_fence_before(success_order);
        __asm__(
            "lwarx %0,%y2\n"
            "cmpw %0, %3\n"
            "bne- 2f\n"
            "stwcx. %4,%y2\n"
            "bne- 2f\n"
            "addi %1,0,1\n"
            "1:"
            BOOST_ATOMIC_ASM_SLOWPATH_CLEAR
            : "=&b" (expected_s), "=&b" (success), "+Z"(v_)
            : "b" (expected_s), "b" (desired_s)
            : "cr0"
        );
        if (success)
            ppc_fence_after(success_order);
        else
            ppc_fence_after(failure_order);
        memcpy(&expected, &expected_s, sizeof(value_type));
        return success;
    }
    bool
    compare_exchange_strong(
        value_type & expected,
        value_type const& desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        storage_type expected_s = 0, desired_s = 0;
        memcpy(&expected_s, &expected, sizeof(value_type));
        memcpy(&desired_s, &desired, sizeof(value_type));
        int success;
        ppc_fence_before(success_order);
        __asm__(
            "0: lwarx %0,%y2\n"
            "cmpw %0, %3\n"
            "bne- 2f\n"
            "stwcx. %4,%y2\n"
            "bne- 0b\n"
            "addi %1,0,1\n"
            "1:"
            BOOST_ATOMIC_ASM_SLOWPATH_CLEAR
            : "=&b" (expected_s), "=&b" (success), "+Z"(v_)
            : "b" (expected_s), "b" (desired_s)
            : "cr0"
        );
        if (success)
            ppc_fence_after(success_order);
        else
            ppc_fence_after(failure_order);
        memcpy(&expected, &expected_s, sizeof(value_type));
        return success;
    }
    bool
    is_lock_free(void) const volatile BOOST_NOEXCEPT
    {
        return true;
    }
    BOOST_ATOMIC_DECLARE_BASE_OPERATORS
    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
private:
    storage_type v_;
};
template<typename T, bool Sign>
class base_atomic<T, void, 2, Sign>
{
private:
    typedef base_atomic this_type;
    typedef T value_type;
    typedef uint32_t storage_type;
protected:
    typedef value_type const& value_arg_type;
public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
    explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : v_(0)
    {
        memcpy(&v_, &v, sizeof(value_type));
    }
    void
    store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        storage_type tmp = 0;
        memcpy(&tmp, &v, sizeof(value_type));
        ppc_fence_before(order);
        __asm__ __volatile__ (
            "stw %1, %0\n"
            : "+m" (v_)
            : "r" (tmp)
        );
        ppc_fence_after_store(order);
    }
    value_type
    load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
    {
        storage_type tmp;
        __asm__ __volatile__ (
            "lwz %0, %1\n"
            "cmpw %0, %0\n"
            "bne- 1f\n"
            "1:\n"
            : "=r"(tmp)
            : "m"(v_)
            : "cr0"
        );
        ppc_fence_after(order);
        value_type v;
        memcpy(&v, &tmp, sizeof(value_type));
        return v;
    }
    value_type
    exchange(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        storage_type tmp = 0, original;
        memcpy(&tmp, &v, sizeof(value_type));
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "lwarx %0,%y1\n"
            "stwcx. %2,%y1\n"
            "bne- 1b\n"
            : "=&b" (original), "+Z"(v_)
            : "b" (tmp)
            : "cr0"
        );
        ppc_fence_after(order);
        value_type res;
        memcpy(&res, &original, sizeof(value_type));
        return res;
    }
    bool
    compare_exchange_weak(
        value_type & expected,
        value_type const& desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        storage_type expected_s = 0, desired_s = 0;
        memcpy(&expected_s, &expected, sizeof(value_type));
        memcpy(&desired_s, &desired, sizeof(value_type));
        int success;
        ppc_fence_before(success_order);
        __asm__(
            "lwarx %0,%y2\n"
            "cmpw %0, %3\n"
            "bne- 2f\n"
            "stwcx. %4,%y2\n"
            "bne- 2f\n"
            "addi %1,0,1\n"
            "1:"
            BOOST_ATOMIC_ASM_SLOWPATH_CLEAR
            : "=&b" (expected_s), "=&b" (success), "+Z"(v_)
            : "b" (expected_s), "b" (desired_s)
            : "cr0"
        );
        if (success)
            ppc_fence_after(success_order);
        else
            ppc_fence_after(failure_order);
        memcpy(&expected, &expected_s, sizeof(value_type));
        return success;
    }
    bool
    compare_exchange_strong(
        value_type & expected,
        value_type const& desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        storage_type expected_s = 0, desired_s = 0;
        memcpy(&expected_s, &expected, sizeof(value_type));
        memcpy(&desired_s, &desired, sizeof(value_type));
        int success;
        ppc_fence_before(success_order);
        __asm__(
            "0: lwarx %0,%y2\n"
            "cmpw %0, %3\n"
            "bne- 2f\n"
            "stwcx. %4,%y2\n"
            "bne- 0b\n"
            "addi %1,0,1\n"
            "1:"
            BOOST_ATOMIC_ASM_SLOWPATH_CLEAR
            : "=&b" (expected_s), "=&b" (success), "+Z"(v_)
            : "b" (expected_s), "b" (desired_s)
            : "cr0"
        );
        if (success)
            ppc_fence_after(success_order);
        else
            ppc_fence_after(failure_order);
        memcpy(&expected, &expected_s, sizeof(value_type));
        return success;
    }
    bool
    is_lock_free(void) const volatile BOOST_NOEXCEPT
    {
        return true;
    }
    BOOST_ATOMIC_DECLARE_BASE_OPERATORS
    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
private:
    storage_type v_;
};
template<typename T, bool Sign>
class base_atomic<T, void, 4, Sign>
{
private:
    typedef base_atomic this_type;
    typedef T value_type;
    typedef uint32_t storage_type;
protected:
    typedef value_type const& value_arg_type;
public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
    explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : v_(0)
    {
        memcpy(&v_, &v, sizeof(value_type));
    }
    void
    store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        storage_type tmp = 0;
        memcpy(&tmp, &v, sizeof(value_type));
        ppc_fence_before(order);
        __asm__ __volatile__ (
            "stw %1, %0\n"
            : "+m" (v_)
            : "r" (tmp)
        );
        ppc_fence_after_store(order);
    }
    value_type
    load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
    {
        storage_type tmp;
        __asm__ __volatile__ (
            "lwz %0, %1\n"
            "cmpw %0, %0\n"
            "bne- 1f\n"
            "1:\n"
            : "=r"(tmp)
            : "m"(v_)
            : "cr0"
        );
        ppc_fence_after(order);
        value_type v;
        memcpy(&v, &tmp, sizeof(value_type));
        return v;
    }
    value_type
    exchange(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        storage_type tmp = 0, original;
        memcpy(&tmp, &v, sizeof(value_type));
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "lwarx %0,%y1\n"
            "stwcx. %2,%y1\n"
            "bne- 1b\n"
            : "=&b" (original), "+Z"(v_)
            : "b" (tmp)
            : "cr0"
        );
        ppc_fence_after(order);
        value_type res;
        memcpy(&res, &original, sizeof(value_type));
        return res;
    }
    bool
    compare_exchange_weak(
        value_type & expected,
        value_type const& desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        storage_type expected_s = 0, desired_s = 0;
        memcpy(&expected_s, &expected, sizeof(value_type));
        memcpy(&desired_s, &desired, sizeof(value_type));
        int success;
        ppc_fence_before(success_order);
        __asm__(
            "lwarx %0,%y2\n"
            "cmpw %0, %3\n"
            "bne- 2f\n"
            "stwcx. %4,%y2\n"
            "bne- 2f\n"
            "addi %1,0,1\n"
            "1:"
            BOOST_ATOMIC_ASM_SLOWPATH_CLEAR
            : "=&b" (expected_s), "=&b" (success), "+Z"(v_)
            : "b" (expected_s), "b" (desired_s)
            : "cr0"
        );
        if (success)
            ppc_fence_after(success_order);
        else
            ppc_fence_after(failure_order);
        memcpy(&expected, &expected_s, sizeof(value_type));
        return success;
    }
    bool
    compare_exchange_strong(
        value_type & expected,
        value_type const& desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        storage_type expected_s = 0, desired_s = 0;
        memcpy(&expected_s, &expected, sizeof(value_type));
        memcpy(&desired_s, &desired, sizeof(value_type));
        int success;
        ppc_fence_before(success_order);
        __asm__(
            "0: lwarx %0,%y2\n"
            "cmpw %0, %3\n"
            "bne- 2f\n"
            "stwcx. %4,%y2\n"
            "bne- 0b\n"
            "addi %1,0,1\n"
            "1:"
            BOOST_ATOMIC_ASM_SLOWPATH_CLEAR
            : "=&b" (expected_s), "=&b" (success), "+Z"(v_)
            : "b" (expected_s), "b" (desired_s)
            : "cr0"
        );
        if (success)
            ppc_fence_after(success_order);
        else
            ppc_fence_after(failure_order);
        memcpy(&expected, &expected_s, sizeof(value_type));
        return success;
    }
    bool
    is_lock_free(void) const volatile BOOST_NOEXCEPT
    {
        return true;
    }
    BOOST_ATOMIC_DECLARE_BASE_OPERATORS
    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
private:
    storage_type v_;
};
#if defined(__powerpc64__)
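/*
    An 8-byte generic specialization is only provided on 64-bit targets,
    where ldarx/stdcx. give doubleword LL/SC; this header offers no lock-free
    64-bit generic cell on 32-bit PowerPC.
*/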
template<typename T, bool Sign>
class base_atomic<T, void, 8, Sign>
{
private:
    typedef base_atomic this_type;
    typedef T value_type;
    typedef uint64_t storage_type;
protected:
    typedef value_type const& value_arg_type;
public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
    explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : v_(0)
    {
        memcpy(&v_, &v, sizeof(value_type));
    }
    void
    store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        storage_type tmp;
        memcpy(&tmp, &v, sizeof(value_type));
        ppc_fence_before(order);
        __asm__ __volatile__ (
            "std %1, %0\n"
            : "+m" (v_)
            : "r" (tmp)
        );
        ppc_fence_after_store(order);
    }
    value_type
    load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
    {
        storage_type tmp;
        __asm__ __volatile__ (
            "ld %0, %1\n"
            "cmpd %0, %0\n"
            "bne- 1f\n"
            "1:\n"
            : "=r"(tmp)
            : "m"(v_)
            : "cr0"
        );
        ppc_fence_after(order);
        value_type v;
        memcpy(&v, &tmp, sizeof(value_type));
        return v;
    }
    value_type
    exchange(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        storage_type tmp = 0, original;
        memcpy(&tmp, &v, sizeof(value_type));
        ppc_fence_before(order);
        __asm__ (
            "1:\n"
            "ldarx %0,%y1\n"
            "stdcx. %2,%y1\n"
            "bne- 1b\n"
            : "=&b" (original), "+Z"(v_)
            : "b" (tmp)
            : "cr0"
        );
        ppc_fence_after(order);
        value_type res;
        memcpy(&res, &original, sizeof(value_type));
        return res;
    }
    bool
    compare_exchange_weak(
        value_type & expected,
        value_type const& desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        storage_type expected_s, desired_s;
        memcpy(&expected_s, &expected, sizeof(value_type));
        memcpy(&desired_s, &desired, sizeof(value_type));
        int success;
        ppc_fence_before(success_order);
        __asm__(
            "ldarx %0,%y2\n"
            "cmpd %0, %3\n"
            "bne- 2f\n"
            "stdcx. %4,%y2\n"
            "bne- 2f\n"
            "addi %1,0,1\n"
            "1:"
            BOOST_ATOMIC_ASM_SLOWPATH_CLEAR
            : "=&b" (expected_s), "=&b" (success), "+Z"(v_)
            : "b" (expected_s), "b" (desired_s)
            : "cr0"
        );
        if (success)
            ppc_fence_after(success_order);
        else
            ppc_fence_after(failure_order);
        memcpy(&expected, &expected_s, sizeof(value_type));
        return success;
    }
    bool
    compare_exchange_strong(
        value_type & expected,
        value_type const& desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        storage_type expected_s, desired_s;
        memcpy(&expected_s, &expected, sizeof(value_type));
        memcpy(&desired_s, &desired, sizeof(value_type));
        int success;
        ppc_fence_before(success_order);
        __asm__(
            "0: ldarx %0,%y2\n"
            "cmpd %0, %3\n"
            "bne- 2f\n"
            "stdcx. %4,%y2\n"
            "bne- 0b\n"
            "addi %1,0,1\n"
            "1:"
            BOOST_ATOMIC_ASM_SLOWPATH_CLEAR
            : "=&b" (expected_s), "=&b" (success), "+Z"(v_)
            : "b" (expected_s), "b" (desired_s)
            : "cr0"
        );
        if (success)
            ppc_fence_after(success_order);
        else
            ppc_fence_after(failure_order);
        memcpy(&expected, &expected_s, sizeof(value_type));
        return success;
    }
    bool
    is_lock_free(void) const volatile BOOST_NOEXCEPT
    {
        return true;
    }
    BOOST_ATOMIC_DECLARE_BASE_OPERATORS
    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
private:
    storage_type v_;
};
#endif
}
}
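/*
    atomic_thread_fence maps the C++ orders onto PowerPC barriers: acquire
    issues isync, release issues lwsync where available (64-bit; on 32-bit
    the case deliberately falls through to the full barrier), and
    acq_rel/seq_cst issue the full sync. Note the intentional fall-throughs
    in the switch below.
*/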
#define BOOST_ATOMIC_THREAD_FENCE 2
inline void
atomic_thread_fence(memory_order order)
{
    switch(order)
    {
    case memory_order_acquire:
        __asm__ __volatile__ ("isync" ::: "memory");
        break;
    case memory_order_release:
#if defined(__powerpc64__)
        __asm__ __volatile__ ("lwsync" ::: "memory");
        break;
#endif
    case memory_order_acq_rel:
    case memory_order_seq_cst:
        __asm__ __volatile__ ("sync" ::: "memory");
    default:;
    }
}
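/*
    atomic_signal_fence only has to stop the compiler from reordering memory
    accesses, not the processor, so an empty asm statement with a "memory"
    clobber suffices for every ordering stronger than relaxed.
*/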
#define BOOST_ATOMIC_SIGNAL_FENCE 2
inline void
atomic_signal_fence(memory_order order)
{
    switch(order)
    {
    case memory_order_acquire:
    case memory_order_release:
    case memory_order_acq_rel:
    case memory_order_seq_cst:
        __asm__ __volatile__ ("" ::: "memory");
        break;
    default:;
    }
}
}
#endif /* !defined(BOOST_ATOMIC_FORCE_FALLBACK) */
#endif