// boost/atomic/detail/gcc-sparcv9.hpp
  1. #ifndef BOOST_ATOMIC_DETAIL_GCC_SPARC_HPP
  2. #define BOOST_ATOMIC_DETAIL_GCC_SPARC_HPP
  3. // Copyright (c) 2010 Helge Bahmann
  4. // Copyright (c) 2013 Tim Blechmann
  5. //
  6. // Distributed under the Boost Software License, Version 1.0.
  7. // See accompanying file LICENSE_1_0.txt or copy at
  8. // http://www.boost.org/LICENSE_1_0.txt)
  9. #include <string.h>
  10. #include <cstddef>
  11. #include <boost/cstdint.hpp>
  12. #include <boost/atomic/detail/config.hpp>
  13. #ifdef BOOST_HAS_PRAGMA_ONCE
  14. #pragma once
  15. #endif
  16. namespace boost {
  17. namespace atomics {
  18. namespace detail {
// Emit the hardware barrier required *before* a store or RMW operation
// so that preceding memory accesses cannot be reordered past it.
// Acquire-only/relaxed/consume orderings constrain nothing before a store,
// hence no membar is issued for them.
inline void
platform_fence_before(memory_order order)
{
    switch(order)
    {
    case memory_order_relaxed:
    case memory_order_acquire:
    case memory_order_consume:
        // No ordering of prior accesses required.
        break;
    case memory_order_release:
    case memory_order_acq_rel:
        // Prior stores and loads must complete before the upcoming store.
        __asm__ __volatile__ ("membar #StoreStore | #LoadStore" ::: "memory");
        /* release */
        break;
    case memory_order_seq_cst:
        // Full barrier for sequential consistency.
        __asm__ __volatile__ ("membar #Sync" ::: "memory");
        /* seq */
        break;
    }
}
// Emit the hardware barrier required *after* a load or RMW operation
// so that subsequent memory accesses cannot be reordered before it.
inline void
platform_fence_after(memory_order order)
{
    switch(order)
    {
    case memory_order_relaxed:
    case memory_order_release:
        // No ordering of subsequent accesses required after a load.
        break;
    case memory_order_acquire:
    case memory_order_acq_rel:
        // The preceding load must complete before later loads and stores.
        __asm__ __volatile__ ("membar #LoadLoad | #LoadStore" ::: "memory");
        /* acquire */
        break;
    case memory_order_consume:
        // Data-dependency ordering: satisfied without a barrier here.
        /* consume */
        break;
    case memory_order_seq_cst:
        // Full barrier for sequential consistency.
        __asm__ __volatile__ ("membar #Sync" ::: "memory");
        /* seq */
        break;
    default:;
    }
}
// Barrier emitted after a plain store.  Only seq_cst stores need a trailing
// full barrier; release ordering is already ensured by the barrier issued
// before the store (see platform_fence_before).
inline void
platform_fence_after_store(memory_order order)
{
    switch(order)
    {
    case memory_order_seq_cst:
        __asm__ __volatile__ ("membar #Sync" ::: "memory");
        /* intentional fallthrough into the empty default */
    default:;
    }
}
// Loads use the same trailing-barrier policy as the generic "after" fence.
inline void
platform_fence_after_load(memory_order order)
{
    platform_fence_after(order);
}
  77. }
  78. }
// Lock-free atomic flag built on the SPARCv9 32-bit CAS instruction.
// The flag value is a full 32-bit word because "cas" only operates on words.
class atomic_flag
{
private:
    atomic_flag(const atomic_flag &) /* = delete */ ;
    atomic_flag & operator=(const atomic_flag &) /* = delete */ ;
    uint32_t v_;  // 0 = clear, non-zero = set
public:
    BOOST_CONSTEXPR atomic_flag(void) BOOST_NOEXCEPT : v_(0) {}
    // Reset the flag to clear.  Plain aligned word stores are used; only
    // fences are needed to honor the requested ordering.
    void
    clear(memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        atomics::detail::platform_fence_before(order);
        const_cast<volatile uint32_t &>(v_) = 0;
        atomics::detail::platform_fence_after_store(order);
    }
    // Atomically set the flag; returns the previous state (true if it was
    // already set).
    bool
    test_and_set(memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        atomics::detail::platform_fence_before(order);
        uint32_t tmp = 1;
        // cas [addr], compare, swap: if *addr == %2 (0), store %0 (1);
        // in every case %0 receives the previous memory contents.
        __asm__ (
            "cas [%1], %2, %0"
            : "+r" (tmp)
            : "r" (&v_), "r" (0)
            : "memory"
        );
        atomics::detail::platform_fence_after(order);
        return tmp;  // old value: non-zero means the flag was already set
    }
};
  109. } /* namespace boost */
  110. #define BOOST_ATOMIC_FLAG_LOCK_FREE 2
  111. #include <boost/atomic/detail/base.hpp>
  112. #if !defined(BOOST_ATOMIC_FORCE_FALLBACK)
  113. #define BOOST_ATOMIC_CHAR_LOCK_FREE 2
  114. #define BOOST_ATOMIC_CHAR16_T_LOCK_FREE 2
  115. #define BOOST_ATOMIC_CHAR32_T_LOCK_FREE 2
  116. #define BOOST_ATOMIC_WCHAR_T_LOCK_FREE 2
  117. #define BOOST_ATOMIC_SHORT_LOCK_FREE 2
  118. #define BOOST_ATOMIC_INT_LOCK_FREE 2
  119. #define BOOST_ATOMIC_LONG_LOCK_FREE 2
  120. #define BOOST_ATOMIC_LLONG_LOCK_FREE 0
  121. #define BOOST_ATOMIC_POINTER_LOCK_FREE 2
  122. #define BOOST_ATOMIC_BOOL_LOCK_FREE 2
  123. namespace boost {
  124. #define BOOST_ATOMIC_THREAD_FENCE 2
// Stand-alone thread fence: map each C++ memory order onto the minimal
// SPARCv9 membar combination that implements it.
inline void
atomic_thread_fence(memory_order order)
{
    switch(order)
    {
    case memory_order_relaxed:
        // No ordering requested.
        break;
    case memory_order_release:
        __asm__ __volatile__ ("membar #StoreStore | #LoadStore" ::: "memory");
        break;
    case memory_order_acquire:
        __asm__ __volatile__ ("membar #LoadLoad | #LoadStore" ::: "memory");
        break;
    case memory_order_acq_rel:
        // Union of the acquire and release barriers above.
        __asm__ __volatile__ ("membar #LoadLoad | #LoadStore | #StoreStore" ::: "memory");
        break;
    case memory_order_consume:
        // Data-dependency ordering needs no explicit barrier here.
        break;
    case memory_order_seq_cst:
        // Full barrier.
        __asm__ __volatile__ ("membar #Sync" ::: "memory");
        break;
    default:;
    }
}
  149. #define BOOST_ATOMIC_SIGNAL_FENCE 2
// Signal fence: only the compiler must not reorder across it, so an empty
// asm with a "memory" clobber suffices — no hardware barrier is emitted.
inline void
atomic_signal_fence(memory_order)
{
    __asm__ __volatile__ ("" ::: "memory");
}
  155. namespace atomics {
  156. namespace detail {
  157. /* integral types */
// Signed 8-bit integral atomics.  The SPARCv9 "cas" instruction operates on
// 32-bit words only, so the value lives in a 32-bit signed storage slot and
// every RMW operation is synthesized from a CAS retry loop.
template<typename T>
class base_atomic<T, int, 1, true>
{
private:
    typedef base_atomic this_type;
    typedef T value_type;
    typedef T difference_type;
    // Word-sized storage so the hardware CAS can address it.
    typedef int32_t storage_type;
protected:
    typedef value_type value_arg_type;
public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
    BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
    // Plain aligned word store bracketed by the ordering fences.
    void
    store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        platform_fence_before(order);
        const_cast<volatile storage_type &>(v_) = v;
        platform_fence_after_store(order);
    }
    // Plain aligned word load followed by the ordering fence.
    value_type
    load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
    {
        value_type v = const_cast<const volatile storage_type &>(v_);
        platform_fence_after_load(order);
        return v;
    }
    // fetch_add/fetch_sub/exchange/fetch_and/fetch_or/fetch_xor below all
    // follow the same pattern: relaxed read, then CAS-loop until the update
    // applies; the returned value is the one observed before the update.
    value_type
    fetch_add(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type tmp = load(memory_order_relaxed);
        do {} while(!compare_exchange_weak(tmp, tmp + v, order, memory_order_relaxed));
        return tmp;
    }
    value_type
    fetch_sub(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type tmp = load(memory_order_relaxed);
        do {} while(!compare_exchange_weak(tmp, tmp - v, order, memory_order_relaxed));
        return tmp;
    }
    value_type
    exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type tmp = load(memory_order_relaxed);
        do {} while(!compare_exchange_weak(tmp, v, order, memory_order_relaxed));
        return tmp;
    }
    // Hardware CAS: on return, desired_s holds the previous memory value;
    // success iff that previous value equals 'expected'.  On failure,
    // 'expected' is updated with the observed value, as the standard requires.
    bool
    compare_exchange_strong(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        platform_fence_before(success_order);
        storage_type desired_s = desired;
        __asm__ (
            "cas [%1], %2, %0"
            : "+r" (desired_s)
            : "r" (&v_), "r" ((storage_type)expected)
            : "memory"
        );
        desired = desired_s;
        bool success = (desired == expected);
        if (success)
            platform_fence_after(success_order);
        else
            platform_fence_after(failure_order);
        expected = desired;
        return success;
    }
    // CAS never fails spuriously on this hardware, so weak == strong.
    bool
    compare_exchange_weak(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        return compare_exchange_strong(expected, desired, success_order, failure_order);
    }
    value_type
    fetch_and(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type tmp = load(memory_order_relaxed);
        do {} while(!compare_exchange_weak(tmp, tmp & v, order, memory_order_relaxed));
        return tmp;
    }
    value_type
    fetch_or(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type tmp = load(memory_order_relaxed);
        do {} while(!compare_exchange_weak(tmp, tmp | v, order, memory_order_relaxed));
        return tmp;
    }
    value_type
    fetch_xor(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type tmp = load(memory_order_relaxed);
        do {} while(!compare_exchange_weak(tmp, tmp ^ v, order, memory_order_relaxed));
        return tmp;
    }
    bool
    is_lock_free(void) const volatile BOOST_NOEXCEPT
    {
        return true;
    }
    BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
private:
    storage_type v_;
};
// Unsigned 8-bit integral atomics.  Identical to the signed 1-byte
// specialization except that the 32-bit CAS-addressable storage word
// is unsigned.
template<typename T>
class base_atomic<T, int, 1, false>
{
private:
    typedef base_atomic this_type;
    typedef T value_type;
    typedef T difference_type;
    // Word-sized storage so the hardware CAS can address it.
    typedef uint32_t storage_type;
protected:
    typedef value_type value_arg_type;
public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
    BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
    // Plain aligned word store bracketed by the ordering fences.
    void
    store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        platform_fence_before(order);
        const_cast<volatile storage_type &>(v_) = v;
        platform_fence_after_store(order);
    }
    // Plain aligned word load followed by the ordering fence.
    value_type
    load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
    {
        value_type v = const_cast<const volatile storage_type &>(v_);
        platform_fence_after_load(order);
        return v;
    }
    // All RMW operations are CAS retry loops returning the pre-update value.
    value_type
    fetch_add(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type tmp = load(memory_order_relaxed);
        do {} while(!compare_exchange_weak(tmp, tmp + v, order, memory_order_relaxed));
        return tmp;
    }
    value_type
    fetch_sub(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type tmp = load(memory_order_relaxed);
        do {} while(!compare_exchange_weak(tmp, tmp - v, order, memory_order_relaxed));
        return tmp;
    }
    value_type
    exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type tmp = load(memory_order_relaxed);
        do {} while(!compare_exchange_weak(tmp, v, order, memory_order_relaxed));
        return tmp;
    }
    // Hardware CAS; 'expected' is refreshed with the observed value on
    // failure, per the standard compare_exchange contract.
    bool
    compare_exchange_strong(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        platform_fence_before(success_order);
        storage_type desired_s = desired;
        __asm__ (
            "cas [%1], %2, %0"
            : "+r" (desired_s)
            : "r" (&v_), "r" ((storage_type)expected)
            : "memory"
        );
        desired = desired_s;
        bool success = (desired == expected);
        if (success)
            platform_fence_after(success_order);
        else
            platform_fence_after(failure_order);
        expected = desired;
        return success;
    }
    // CAS never fails spuriously on this hardware, so weak == strong.
    bool
    compare_exchange_weak(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        return compare_exchange_strong(expected, desired, success_order, failure_order);
    }
    value_type
    fetch_and(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type tmp = load(memory_order_relaxed);
        do {} while(!compare_exchange_weak(tmp, tmp & v, order, memory_order_relaxed));
        return tmp;
    }
    value_type
    fetch_or(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type tmp = load(memory_order_relaxed);
        do {} while(!compare_exchange_weak(tmp, tmp | v, order, memory_order_relaxed));
        return tmp;
    }
    value_type
    fetch_xor(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type tmp = load(memory_order_relaxed);
        do {} while(!compare_exchange_weak(tmp, tmp ^ v, order, memory_order_relaxed));
        return tmp;
    }
    bool
    is_lock_free(void) const volatile BOOST_NOEXCEPT
    {
        return true;
    }
    BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
private:
    storage_type v_;
};
// Signed 16-bit integral atomics.  As with the 1-byte specializations, the
// value is widened into a CAS-addressable 32-bit storage word.
template<typename T>
class base_atomic<T, int, 2, true>
{
private:
    typedef base_atomic this_type;
    typedef T value_type;
    typedef T difference_type;
    // Word-sized storage so the hardware CAS can address it.
    typedef int32_t storage_type;
protected:
    typedef value_type value_arg_type;
public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
    BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
    // Plain aligned word store bracketed by the ordering fences.
    void
    store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        platform_fence_before(order);
        const_cast<volatile storage_type &>(v_) = v;
        platform_fence_after_store(order);
    }
    // Plain aligned word load followed by the ordering fence.
    value_type
    load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
    {
        value_type v = const_cast<const volatile storage_type &>(v_);
        platform_fence_after_load(order);
        return v;
    }
    // All RMW operations are CAS retry loops returning the pre-update value.
    value_type
    fetch_add(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type tmp = load(memory_order_relaxed);
        do {} while(!compare_exchange_weak(tmp, tmp + v, order, memory_order_relaxed));
        return tmp;
    }
    value_type
    fetch_sub(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type tmp = load(memory_order_relaxed);
        do {} while(!compare_exchange_weak(tmp, tmp - v, order, memory_order_relaxed));
        return tmp;
    }
    value_type
    exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type tmp = load(memory_order_relaxed);
        do {} while(!compare_exchange_weak(tmp, v, order, memory_order_relaxed));
        return tmp;
    }
    // Hardware CAS; 'expected' is refreshed with the observed value on
    // failure, per the standard compare_exchange contract.
    bool
    compare_exchange_strong(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        platform_fence_before(success_order);
        storage_type desired_s = desired;
        __asm__ (
            "cas [%1], %2, %0"
            : "+r" (desired_s)
            : "r" (&v_), "r" ((storage_type)expected)
            : "memory"
        );
        desired = desired_s;
        bool success = (desired == expected);
        if (success)
            platform_fence_after(success_order);
        else
            platform_fence_after(failure_order);
        expected = desired;
        return success;
    }
    // CAS never fails spuriously on this hardware, so weak == strong.
    bool
    compare_exchange_weak(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        return compare_exchange_strong(expected, desired, success_order, failure_order);
    }
    value_type
    fetch_and(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type tmp = load(memory_order_relaxed);
        do {} while(!compare_exchange_weak(tmp, tmp & v, order, memory_order_relaxed));
        return tmp;
    }
    value_type
    fetch_or(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type tmp = load(memory_order_relaxed);
        do {} while(!compare_exchange_weak(tmp, tmp | v, order, memory_order_relaxed));
        return tmp;
    }
    value_type
    fetch_xor(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type tmp = load(memory_order_relaxed);
        do {} while(!compare_exchange_weak(tmp, tmp ^ v, order, memory_order_relaxed));
        return tmp;
    }
    bool
    is_lock_free(void) const volatile BOOST_NOEXCEPT
    {
        return true;
    }
    BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
private:
    storage_type v_;
};
// Unsigned 16-bit integral atomics.  Identical to the signed 2-byte
// specialization except for the unsigned 32-bit storage word.
template<typename T>
class base_atomic<T, int, 2, false>
{
private:
    typedef base_atomic this_type;
    typedef T value_type;
    typedef T difference_type;
    // Word-sized storage so the hardware CAS can address it.
    typedef uint32_t storage_type;
protected:
    typedef value_type value_arg_type;
public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
    BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
    // Plain aligned word store bracketed by the ordering fences.
    void
    store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        platform_fence_before(order);
        const_cast<volatile storage_type &>(v_) = v;
        platform_fence_after_store(order);
    }
    // Plain aligned word load followed by the ordering fence.
    value_type
    load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
    {
        value_type v = const_cast<const volatile storage_type &>(v_);
        platform_fence_after_load(order);
        return v;
    }
    // All RMW operations are CAS retry loops returning the pre-update value.
    value_type
    fetch_add(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type tmp = load(memory_order_relaxed);
        do {} while(!compare_exchange_weak(tmp, tmp + v, order, memory_order_relaxed));
        return tmp;
    }
    value_type
    fetch_sub(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type tmp = load(memory_order_relaxed);
        do {} while(!compare_exchange_weak(tmp, tmp - v, order, memory_order_relaxed));
        return tmp;
    }
    value_type
    exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type tmp = load(memory_order_relaxed);
        do {} while(!compare_exchange_weak(tmp, v, order, memory_order_relaxed));
        return tmp;
    }
    // Hardware CAS; 'expected' is refreshed with the observed value on
    // failure, per the standard compare_exchange contract.
    bool
    compare_exchange_strong(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        platform_fence_before(success_order);
        storage_type desired_s = desired;
        __asm__ (
            "cas [%1], %2, %0"
            : "+r" (desired_s)
            : "r" (&v_), "r" ((storage_type)expected)
            : "memory"
        );
        desired = desired_s;
        bool success = (desired == expected);
        if (success)
            platform_fence_after(success_order);
        else
            platform_fence_after(failure_order);
        expected = desired;
        return success;
    }
    // CAS never fails spuriously on this hardware, so weak == strong.
    bool
    compare_exchange_weak(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        return compare_exchange_strong(expected, desired, success_order, failure_order);
    }
    value_type
    fetch_and(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type tmp = load(memory_order_relaxed);
        do {} while(!compare_exchange_weak(tmp, tmp & v, order, memory_order_relaxed));
        return tmp;
    }
    value_type
    fetch_or(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type tmp = load(memory_order_relaxed);
        do {} while(!compare_exchange_weak(tmp, tmp | v, order, memory_order_relaxed));
        return tmp;
    }
    value_type
    fetch_xor(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type tmp = load(memory_order_relaxed);
        do {} while(!compare_exchange_weak(tmp, tmp ^ v, order, memory_order_relaxed));
        return tmp;
    }
    bool
    is_lock_free(void) const volatile BOOST_NOEXCEPT
    {
        return true;
    }
    BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
private:
    storage_type v_;
};
// 32-bit integral atomics (either signedness).  The value is already
// word-sized, so the hardware CAS operates on it directly with no widening
// storage type.
template<typename T, bool Sign>
class base_atomic<T, int, 4, Sign>
{
private:
    typedef base_atomic this_type;
    typedef T value_type;
    typedef T difference_type;
protected:
    typedef value_type value_arg_type;
public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
    BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
    // Plain aligned word store bracketed by the ordering fences.
    void
    store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        platform_fence_before(order);
        const_cast<volatile value_type &>(v_) = v;
        platform_fence_after_store(order);
    }
    // Plain aligned word load followed by the ordering fence.
    value_type
    load(memory_order order = memory_order_seq_cst)const volatile BOOST_NOEXCEPT
    {
        value_type v = const_cast<const volatile value_type &>(v_);
        platform_fence_after_load(order);
        return v;
    }
    // All RMW operations are CAS retry loops returning the pre-update value.
    value_type
    fetch_add(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type tmp = load(memory_order_relaxed);
        do {} while(!compare_exchange_weak(tmp, tmp + v, order, memory_order_relaxed));
        return tmp;
    }
    value_type
    fetch_sub(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type tmp = load(memory_order_relaxed);
        do {} while(!compare_exchange_weak(tmp, tmp - v, order, memory_order_relaxed));
        return tmp;
    }
    value_type
    exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type tmp = load(memory_order_relaxed);
        do {} while(!compare_exchange_weak(tmp, v, order, memory_order_relaxed));
        return tmp;
    }
    // Hardware CAS directly on the value; on return 'desired' holds the
    // previous memory contents, and 'expected' is refreshed on failure.
    bool
    compare_exchange_strong(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        platform_fence_before(success_order);
        __asm__ (
            "cas [%1], %2, %0"
            : "+r" (desired)
            : "r" (&v_), "r" (expected)
            : "memory"
        );
        bool success = (desired == expected);
        if (success)
            platform_fence_after(success_order);
        else
            platform_fence_after(failure_order);
        expected = desired;
        return success;
    }
    // CAS never fails spuriously on this hardware, so weak == strong.
    bool
    compare_exchange_weak(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        return compare_exchange_strong(expected, desired, success_order, failure_order);
    }
    value_type
    fetch_and(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type tmp = load(memory_order_relaxed);
        do {} while(!compare_exchange_weak(tmp, tmp & v, order, memory_order_relaxed));
        return tmp;
    }
    value_type
    fetch_or(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type tmp = load(memory_order_relaxed);
        do {} while(!compare_exchange_weak(tmp, tmp | v, order, memory_order_relaxed));
        return tmp;
    }
    value_type
    fetch_xor(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type tmp = load(memory_order_relaxed);
        do {} while(!compare_exchange_weak(tmp, tmp ^ v, order, memory_order_relaxed));
        return tmp;
    }
    bool
    is_lock_free(void) const volatile BOOST_NOEXCEPT
    {
        return true;
    }
    BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS
    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
private:
    value_type v_;
};
  720. /* pointer types */
// void* atomics on a 32-bit pointer model: the pointer is word-sized, so the
// hardware CAS operates on it directly.  Arithmetic (fetch_add/fetch_sub)
// is byte-granular, implemented via char* casts.
template<bool Sign>
class base_atomic<void *, void *, 4, Sign>
{
private:
    typedef base_atomic this_type;
    typedef std::ptrdiff_t difference_type;
    typedef void * value_type;
protected:
    typedef value_type value_arg_type;
public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
    BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}
    // Plain aligned word store bracketed by the ordering fences.
    void
    store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        platform_fence_before(order);
        const_cast<volatile value_type &>(v_) = v;
        platform_fence_after_store(order);
    }
    // Plain aligned word load followed by the ordering fence.
    value_type load(memory_order order = memory_order_seq_cst)const volatile BOOST_NOEXCEPT
    {
        value_type v = const_cast<const volatile value_type &>(v_);
        platform_fence_after_load(order);
        return v;
    }
    // CAS retry loop returning the pre-update pointer.
    value_type
    exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type tmp = load(memory_order_relaxed);
        do {} while(!compare_exchange_weak(tmp, v, order, memory_order_relaxed));
        return tmp;
    }
    // Hardware CAS directly on the pointer; 'expected' is refreshed with the
    // observed value on failure, per the standard compare_exchange contract.
    bool
    compare_exchange_strong(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        platform_fence_before(success_order);
        __asm__ (
            "cas [%1], %2, %0"
            : "+r" (desired)
            : "r" (&v_), "r" (expected)
            : "memory"
        );
        bool success = (desired == expected);
        if (success)
            platform_fence_after(success_order);
        else
            platform_fence_after(failure_order);
        expected = desired;
        return success;
    }
    // CAS never fails spuriously on this hardware, so weak == strong.
    bool compare_exchange_weak(value_type & expected, value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        return compare_exchange_strong(expected, desired, success_order, failure_order);
    }
    bool
    is_lock_free(void) const volatile BOOST_NOEXCEPT
    {
        return true;
    }
    // Byte-wise pointer arithmetic: void* has no native +, so offset via char*.
    value_type
    fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type tmp = load(memory_order_relaxed);
        do {} while(!compare_exchange_weak(tmp, (char*)tmp + v, order, memory_order_relaxed));
        return tmp;
    }
    value_type
    fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type tmp = load(memory_order_relaxed);
        do {} while(!compare_exchange_weak(tmp, (char*)tmp - v, order, memory_order_relaxed));
        return tmp;
    }
    BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS
    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
private:
    value_type v_;
};
// Specialization for pointers to objects (T *), stored in a 32-bit word.
// Loads/stores are plain volatile accesses bracketed by the platform fences;
// every read-modify-write operation is built from the SPARC V9 "cas"
// instruction.
template<typename T, bool Sign>
class base_atomic<T *, void *, 4, Sign>
{
private:
    typedef base_atomic this_type;
    typedef T * value_type;
    typedef std::ptrdiff_t difference_type;
protected:
    typedef value_type value_arg_type;
public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
    BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}

    // Plain volatile store bracketed by the fences `order` requires.
    void
    store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        platform_fence_before(order);
        const_cast<volatile value_type &>(v_) = v;
        platform_fence_after_store(order);
    }

    // Plain volatile load followed by the fence `order` requires.
    value_type
    load(memory_order order = memory_order_seq_cst)const volatile BOOST_NOEXCEPT
    {
        value_type v = const_cast<const volatile value_type &>(v_);
        platform_fence_after_load(order);
        return v;
    }

    // Exchange implemented as a CAS retry loop; returns the previous value.
    value_type
    exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type tmp = load(memory_order_relaxed);
        do {} while(!compare_exchange_weak(tmp, v, order, memory_order_relaxed));
        return tmp;
    }

    // Strong CAS: "cas [addr], cmp, swap" atomically stores `desired` into
    // v_ iff v_ == expected; the previous contents of v_ come back in the
    // `desired` register. On return `expected` holds the observed value.
    bool
    compare_exchange_strong(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        platform_fence_before(success_order);
        __asm__ (
            "cas [%1], %2, %0"
            : "+r" (desired)
            : "r" (&v_), "r" (expected)
            : "memory"
        );
        bool success = (desired == expected);
        if (success)
            platform_fence_after(success_order);
        else
            platform_fence_after(failure_order);
        expected = desired;
        return success;
    }

    // Hardware "cas" does not fail spuriously, so weak simply forwards to
    // the strong implementation.
    bool
    compare_exchange_weak(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        return compare_exchange_strong(expected, desired, success_order, failure_order);
    }

    // Atomically advances the pointer by v elements (ordinary T* arithmetic
    // scales by sizeof(T)); returns the previous value.
    value_type
    fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type tmp = load(memory_order_relaxed);
        do {} while(!compare_exchange_weak(tmp, tmp + v, order, memory_order_relaxed));
        return tmp;
    }

    // Atomically moves the pointer back by v elements; returns the previous value.
    value_type
    fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type tmp = load(memory_order_relaxed);
        do {} while(!compare_exchange_weak(tmp, tmp - v, order, memory_order_relaxed));
        return tmp;
    }

    // Always lock-free: implemented entirely with the hardware "cas".
    bool
    is_lock_free(void) const volatile BOOST_NOEXCEPT
    {
        return true;
    }

    BOOST_ATOMIC_DECLARE_POINTER_OPERATORS
    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
private:
    value_type v_;
};
  895. /* generic types */
// Generic 1-byte objects. The value is widened into a 32-bit storage cell
// because "cas" compares and swaps full words; the unused bytes are always
// zero-initialized so that whole-word comparisons are meaningful. All
// value <-> storage conversions go through memcpy to avoid aliasing issues.
template<typename T, bool Sign>
class base_atomic<T, void, 1, Sign>
{
private:
    typedef base_atomic this_type;
    typedef T value_type;
    typedef uint32_t storage_type;
protected:
    typedef value_type const& value_arg_type;
public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
    // Zero-init the whole word first so padding bytes are deterministic.
    BOOST_CONSTEXPR explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : v_(0)
    {
        memcpy(&v_, &v, sizeof(value_type));
    }

    // Widen v into a zeroed word, then plain volatile store with fences.
    void
    store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        storage_type tmp = 0;
        memcpy(&tmp, &v, sizeof(value_type));
        platform_fence_before(order);
        const_cast<volatile storage_type &>(v_) = tmp;
        platform_fence_after_store(order);
    }

    // Plain volatile load of the word, then narrow back to value_type.
    value_type
    load(memory_order order = memory_order_seq_cst)const volatile BOOST_NOEXCEPT
    {
        storage_type tmp = const_cast<volatile storage_type &>(v_);
        platform_fence_after_load(order);
        value_type v;
        memcpy(&v, &tmp, sizeof(value_type));
        return v;
    }

    // Exchange implemented as a CAS retry loop; returns the previous value.
    value_type
    exchange(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type tmp = load(memory_order_relaxed);
        do {} while(!compare_exchange_weak(tmp, v, order, memory_order_relaxed));
        return tmp;
    }

    // Strong CAS on the full 32-bit storage word. On return `expected`
    // receives the observed value (only sizeof(value_type) bytes copied back).
    bool
    compare_exchange_strong(
        value_type & expected,
        value_type const& desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        storage_type expected_s = 0, desired_s = 0;
        memcpy(&expected_s, &expected, sizeof(value_type));
        memcpy(&desired_s, &desired, sizeof(value_type));
        platform_fence_before(success_order);
        // cas returns the previous contents of v_ in desired_s ("+r").
        __asm__ (
            "cas [%1], %2, %0"
            : "+r" (desired_s)
            : "r" (&v_), "r" (expected_s)
            : "memory"
        );
        bool success = (desired_s == expected_s);
        if (success)
            platform_fence_after(success_order);
        else
            platform_fence_after(failure_order);
        memcpy(&expected, &desired_s, sizeof(value_type));
        return success;
    }

    // Hardware "cas" does not fail spuriously, so weak forwards to strong.
    bool
    compare_exchange_weak(
        value_type & expected,
        value_type const& desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        return compare_exchange_strong(expected, desired, success_order, failure_order);
    }

    // Always lock-free: implemented entirely with the hardware "cas".
    bool
    is_lock_free(void) const volatile BOOST_NOEXCEPT
    {
        return true;
    }

    BOOST_ATOMIC_DECLARE_BASE_OPERATORS
    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
private:
    storage_type v_;
};
// Generic 2-byte objects. Same scheme as the 1-byte specialization: the
// value is widened into a zero-padded 32-bit cell because "cas" operates on
// full words, and memcpy handles all value <-> storage conversions.
template<typename T, bool Sign>
class base_atomic<T, void, 2, Sign>
{
private:
    typedef base_atomic this_type;
    typedef T value_type;
    typedef uint32_t storage_type;
protected:
    typedef value_type const& value_arg_type;
public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
    // Zero-init the whole word first so padding bytes are deterministic.
    BOOST_CONSTEXPR explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : v_(0)
    {
        memcpy(&v_, &v, sizeof(value_type));
    }

    // Widen v into a zeroed word, then plain volatile store with fences.
    void
    store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        storage_type tmp = 0;
        memcpy(&tmp, &v, sizeof(value_type));
        platform_fence_before(order);
        const_cast<volatile storage_type &>(v_) = tmp;
        platform_fence_after_store(order);
    }

    // Plain volatile load of the word, then narrow back to value_type.
    value_type
    load(memory_order order = memory_order_seq_cst)const volatile BOOST_NOEXCEPT
    {
        storage_type tmp = const_cast<volatile storage_type &>(v_);
        platform_fence_after_load(order);
        value_type v;
        memcpy(&v, &tmp, sizeof(value_type));
        return v;
    }

    // Exchange implemented as a CAS retry loop; returns the previous value.
    value_type
    exchange(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type tmp = load(memory_order_relaxed);
        do {} while(!compare_exchange_weak(tmp, v, order, memory_order_relaxed));
        return tmp;
    }

    // Strong CAS on the full 32-bit storage word. On return `expected`
    // receives the observed value (only sizeof(value_type) bytes copied back).
    bool
    compare_exchange_strong(
        value_type & expected,
        value_type const& desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        storage_type expected_s = 0, desired_s = 0;
        memcpy(&expected_s, &expected, sizeof(value_type));
        memcpy(&desired_s, &desired, sizeof(value_type));
        platform_fence_before(success_order);
        // cas returns the previous contents of v_ in desired_s ("+r").
        __asm__ (
            "cas [%1], %2, %0"
            : "+r" (desired_s)
            : "r" (&v_), "r" (expected_s)
            : "memory"
        );
        bool success = (desired_s == expected_s);
        if (success)
            platform_fence_after(success_order);
        else
            platform_fence_after(failure_order);
        memcpy(&expected, &desired_s, sizeof(value_type));
        return success;
    }

    // Hardware "cas" does not fail spuriously, so weak forwards to strong.
    bool
    compare_exchange_weak(
        value_type & expected,
        value_type const& desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        return compare_exchange_strong(expected, desired, success_order, failure_order);
    }

    // Always lock-free: implemented entirely with the hardware "cas".
    bool
    is_lock_free(void) const volatile BOOST_NOEXCEPT
    {
        return true;
    }

    BOOST_ATOMIC_DECLARE_BASE_OPERATORS
    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
private:
    storage_type v_;
};
// Generic 4-byte objects. The value exactly fills the 32-bit storage word,
// so no padding is involved, but the same memcpy-based widening code is
// shared with the 1- and 2-byte specializations for uniformity.
template<typename T, bool Sign>
class base_atomic<T, void, 4, Sign>
{
private:
    typedef base_atomic this_type;
    typedef T value_type;
    typedef uint32_t storage_type;
protected:
    typedef value_type const& value_arg_type;
public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
    BOOST_CONSTEXPR explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : v_(0)
    {
        memcpy(&v_, &v, sizeof(value_type));
    }

    // Convert v into the storage word, then plain volatile store with fences.
    void
    store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        storage_type tmp = 0;
        memcpy(&tmp, &v, sizeof(value_type));
        platform_fence_before(order);
        const_cast<volatile storage_type &>(v_) = tmp;
        platform_fence_after_store(order);
    }

    // Plain volatile load of the word, then convert back to value_type.
    value_type
    load(memory_order order = memory_order_seq_cst)const volatile BOOST_NOEXCEPT
    {
        storage_type tmp = const_cast<volatile storage_type &>(v_);
        platform_fence_after_load(order);
        value_type v;
        memcpy(&v, &tmp, sizeof(value_type));
        return v;
    }

    // Exchange implemented as a CAS retry loop; returns the previous value.
    value_type
    exchange(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        value_type tmp = load(memory_order_relaxed);
        do {} while(!compare_exchange_weak(tmp, v, order, memory_order_relaxed));
        return tmp;
    }

    // Strong CAS on the 32-bit storage word. On return `expected` receives
    // the value actually observed in v_.
    bool
    compare_exchange_strong(
        value_type & expected,
        value_type const& desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        storage_type expected_s = 0, desired_s = 0;
        memcpy(&expected_s, &expected, sizeof(value_type));
        memcpy(&desired_s, &desired, sizeof(value_type));
        platform_fence_before(success_order);
        // cas returns the previous contents of v_ in desired_s ("+r").
        __asm__ (
            "cas [%1], %2, %0"
            : "+r" (desired_s)
            : "r" (&v_), "r" (expected_s)
            : "memory"
        );
        bool success = (desired_s == expected_s);
        if (success)
            platform_fence_after(success_order);
        else
            platform_fence_after(failure_order);
        memcpy(&expected, &desired_s, sizeof(value_type));
        return success;
    }

    // Hardware "cas" does not fail spuriously, so weak forwards to strong.
    bool
    compare_exchange_weak(
        value_type & expected,
        value_type const& desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        return compare_exchange_strong(expected, desired, success_order, failure_order);
    }

    // Always lock-free: implemented entirely with the hardware "cas".
    bool
    is_lock_free(void) const volatile BOOST_NOEXCEPT
    {
        return true;
    }

    BOOST_ATOMIC_DECLARE_BASE_OPERATORS
    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))
private:
    storage_type v_;
};
  1151. #endif /* !defined(BOOST_ATOMIC_FORCE_FALLBACK) */
  1152. }
  1153. }
  1154. }
  1155. #endif