windows.hpp

#ifndef BOOST_ATOMIC_DETAIL_WINDOWS_HPP
#define BOOST_ATOMIC_DETAIL_WINDOWS_HPP

// Copyright (c) 2009 Helge Bahmann
// Copyright (c) 2012 Andrey Semashev
// Copyright (c) 2013 Tim Blechmann, Andrey Semashev
//
// Distributed under the Boost Software License, Version 1.0.
// See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)

#include <string.h>
#include <cstddef>
#include <boost/cstdint.hpp>
#include <boost/type_traits/make_signed.hpp>
#include <boost/atomic/detail/config.hpp>
#include <boost/atomic/detail/interlocked.hpp>

#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif

#ifdef _MSC_VER
#pragma warning(push)
// 'order' : unreferenced formal parameter
#pragma warning(disable: 4100)
#endif

#if defined(_MSC_VER) && (defined(_M_AMD64) || defined(_M_IX86))
extern "C" void _mm_pause(void);
#pragma intrinsic(_mm_pause)
#define BOOST_ATOMIC_X86_PAUSE() _mm_pause()
#else
#define BOOST_ATOMIC_X86_PAUSE()
#endif

#if defined(_M_IX86) && _M_IX86 >= 500
#define BOOST_ATOMIC_X86_HAS_CMPXCHG8B 1
#endif

// Define hardware barriers
#if defined(_MSC_VER) && (defined(_M_AMD64) || (defined(_M_IX86) && defined(_M_IX86_FP) && _M_IX86_FP >= 2))
extern "C" void _mm_mfence(void);
#pragma intrinsic(_mm_mfence)
#endif

// Define compiler barriers
#if defined(__INTEL_COMPILER)
#define BOOST_ATOMIC_COMPILER_BARRIER() __memory_barrier()
#elif defined(_MSC_VER) && _MSC_VER >= 1310 && !defined(_WIN32_WCE)
extern "C" void _ReadWriteBarrier(void);
#pragma intrinsic(_ReadWriteBarrier)
#define BOOST_ATOMIC_COMPILER_BARRIER() _ReadWriteBarrier()
#endif

#ifndef BOOST_ATOMIC_COMPILER_BARRIER
#define BOOST_ATOMIC_COMPILER_BARRIER()
#endif

namespace boost {
namespace atomics {
namespace detail {

BOOST_FORCEINLINE void hardware_full_fence(void)
{
#if defined(_MSC_VER) && (defined(_M_AMD64) || (defined(_M_IX86) && defined(_M_IX86_FP) && _M_IX86_FP >= 2))
    // Use mfence only if SSE2 is available
    _mm_mfence();
#else
    long tmp;
    BOOST_ATOMIC_INTERLOCKED_EXCHANGE(&tmp, 0);
#endif
}
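
// Illustrative note (not in the original sources): in the fallback branch
// above, the interlocked exchange on a dummy stack variable serves as a full
// memory barrier, because interlocked operations compile to lock-prefixed
// instructions on x86, and those fully order memory.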

BOOST_FORCEINLINE void
platform_fence_before(memory_order)
{
    BOOST_ATOMIC_COMPILER_BARRIER();
}

BOOST_FORCEINLINE void
platform_fence_after(memory_order)
{
    BOOST_ATOMIC_COMPILER_BARRIER();
}

BOOST_FORCEINLINE void
platform_fence_before_store(memory_order)
{
    BOOST_ATOMIC_COMPILER_BARRIER();
}

BOOST_FORCEINLINE void
platform_fence_after_store(memory_order)
{
    BOOST_ATOMIC_COMPILER_BARRIER();
}

BOOST_FORCEINLINE void
platform_fence_after_load(memory_order order)
{
    BOOST_ATOMIC_COMPILER_BARRIER();

    // On x86 and x86_64 there is no need for a hardware barrier,
    // even if seq_cst memory order is requested, because all
    // seq_cst writes are implemented with lock-prefixed operations
    // or xchg which has implied lock prefix. Therefore normal loads
    // are already ordered with seq_cst stores on these architectures.
#if !(defined(_MSC_VER) && (defined(_M_AMD64) || defined(_M_IX86)))
    if (order == memory_order_seq_cst)
        hardware_full_fence();
#endif
}

} // namespace detail
} // namespace atomics

#define BOOST_ATOMIC_THREAD_FENCE 2
BOOST_FORCEINLINE void
atomic_thread_fence(memory_order order)
{
    BOOST_ATOMIC_COMPILER_BARRIER();
    if (order == memory_order_seq_cst)
        atomics::detail::hardware_full_fence();
}
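
// Usage sketch (illustrative only, not part of this header): only a seq_cst
// fence requires the hardware barrier; weaker orders reduce to the compiler
// barrier alone on this platform:
//   boost::atomic_thread_fence(boost::memory_order_seq_cst); // mfence or locked xchg
//   boost::atomic_thread_fence(boost::memory_order_acquire); // compiler barrier only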

#define BOOST_ATOMIC_SIGNAL_FENCE 2
BOOST_FORCEINLINE void
atomic_signal_fence(memory_order)
{
    BOOST_ATOMIC_COMPILER_BARRIER();
}

#undef BOOST_ATOMIC_COMPILER_BARRIER

class atomic_flag
{
private:
    atomic_flag(const atomic_flag &) /* = delete */ ;
    atomic_flag & operator=(const atomic_flag &) /* = delete */ ;
    uint32_t v_;

public:
    BOOST_CONSTEXPR atomic_flag(void) BOOST_NOEXCEPT : v_(0) {}

    bool
    test_and_set(memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        atomics::detail::platform_fence_before(order);
        const uint32_t old = (uint32_t)BOOST_ATOMIC_INTERLOCKED_EXCHANGE(&v_, 1);
        atomics::detail::platform_fence_after(order);
        return old != 0;
    }

    void
    clear(memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        atomics::detail::platform_fence_before_store(order);
        BOOST_ATOMIC_INTERLOCKED_EXCHANGE(&v_, 0);
        atomics::detail::platform_fence_after_store(order);
    }
};
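
// Usage sketch (illustrative, not part of this header): atomic_flag suffices
// to build a minimal spinlock, assuming the user goes through
// <boost/atomic.hpp>; 'lock_fl' is a hypothetical name:
//   boost::atomic_flag lock_fl;
//   while (lock_fl.test_and_set(boost::memory_order_acquire))
//   {
//       // spin until the current holder calls clear()
//   }
//   // ... critical section ...
//   lock_fl.clear(boost::memory_order_release);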

} // namespace boost

#define BOOST_ATOMIC_FLAG_LOCK_FREE 2

#include <boost/atomic/detail/base.hpp>

#if !defined(BOOST_ATOMIC_FORCE_FALLBACK)

#define BOOST_ATOMIC_CHAR_LOCK_FREE 2
#define BOOST_ATOMIC_SHORT_LOCK_FREE 2
#define BOOST_ATOMIC_INT_LOCK_FREE 2
#define BOOST_ATOMIC_LONG_LOCK_FREE 2
#if defined(BOOST_ATOMIC_X86_HAS_CMPXCHG8B) || defined(_M_AMD64) || defined(_M_IA64)
#define BOOST_ATOMIC_LLONG_LOCK_FREE 2
#else
#define BOOST_ATOMIC_LLONG_LOCK_FREE 0
#endif
#define BOOST_ATOMIC_POINTER_LOCK_FREE 2
#define BOOST_ATOMIC_BOOL_LOCK_FREE 2

namespace boost {
namespace atomics {
namespace detail {

#if defined(_MSC_VER)
#pragma warning(push)
// 'char' : forcing value to bool 'true' or 'false' (performance warning)
#pragma warning(disable: 4800)
#endif

template<typename T, bool Sign>
class base_atomic<T, int, 1, Sign>
{
private:
    typedef base_atomic this_type;
    typedef T value_type;
#ifdef BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8
    typedef value_type storage_type;
#else
    typedef uint32_t storage_type;
#endif
    typedef T difference_type;

protected:
    typedef value_type value_arg_type;

public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
    BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}

    void
    store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        if (order != memory_order_seq_cst) {
            platform_fence_before(order);
            v_ = static_cast< storage_type >(v);
        } else {
            exchange(v, order);
        }
    }

    value_type
    load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
    {
        value_type v = static_cast< value_type >(v_);
        platform_fence_after_load(order);
        return v;
    }

    value_type
    fetch_add(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        platform_fence_before(order);
#ifdef BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8
        v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD8(&v_, v));
#else
        v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD(&v_, v));
#endif
        platform_fence_after(order);
        return v;
    }

    value_type
    fetch_sub(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        typedef typename make_signed< value_type >::type signed_value_type;
        return fetch_add(static_cast< value_type >(-static_cast< signed_value_type >(v)), order);
    }

    value_type
    exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        platform_fence_before(order);
#ifdef BOOST_ATOMIC_INTERLOCKED_EXCHANGE8
        v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8(&v_, v));
#else
        v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE(&v_, v));
#endif
        platform_fence_after(order);
        return v;
    }

    bool
    compare_exchange_strong(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        value_type previous = expected;
        platform_fence_before(success_order);
#ifdef BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8
        value_type oldval = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8(&v_, desired, previous));
#else
        value_type oldval = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE(&v_, desired, previous));
#endif
        bool success = (previous == oldval);
        if (success)
            platform_fence_after(success_order);
        else
            platform_fence_after(failure_order);
        expected = oldval;
        return success;
    }

    bool
    compare_exchange_weak(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        return compare_exchange_strong(expected, desired, success_order, failure_order);
    }
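
    // Usage sketch (illustrative): the canonical CAS loop this interface
    // supports. On failure, compare_exchange_* writes the value observed in
    // memory back into 'expected', so the loop needs no separate reload.
    // Here 'a' is a hypothetical atomic object and 'f' a pure transform:
    //   T expected = a.load(boost::memory_order_relaxed);
    //   while (!a.compare_exchange_weak(expected, f(expected),
    //       boost::memory_order_acq_rel, boost::memory_order_relaxed))
    //   {
    //       BOOST_ATOMIC_X86_PAUSE(); // 'expected' was refreshed; retry
    //   }
    // The fetch_and/fetch_or/fetch_xor fallbacks below use exactly this
    // pattern.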

    value_type
    fetch_and(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
#ifdef BOOST_ATOMIC_INTERLOCKED_AND8
        platform_fence_before(order);
        v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_AND8(&v_, v));
        platform_fence_after(order);
        return v;
#elif defined(BOOST_ATOMIC_INTERLOCKED_AND)
        platform_fence_before(order);
        v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_AND(&v_, v));
        platform_fence_after(order);
        return v;
#else
        value_type tmp = load(memory_order_relaxed);
        for (; !compare_exchange_weak(tmp, tmp & v, order, memory_order_relaxed);)
        {
            BOOST_ATOMIC_X86_PAUSE();
        }
        return tmp;
#endif
    }

    value_type
    fetch_or(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
#ifdef BOOST_ATOMIC_INTERLOCKED_OR8
        platform_fence_before(order);
        v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_OR8(&v_, v));
        platform_fence_after(order);
        return v;
#elif defined(BOOST_ATOMIC_INTERLOCKED_OR)
        platform_fence_before(order);
        v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_OR(&v_, v));
        platform_fence_after(order);
        return v;
#else
        value_type tmp = load(memory_order_relaxed);
        for (; !compare_exchange_weak(tmp, tmp | v, order, memory_order_relaxed);)
        {
            BOOST_ATOMIC_X86_PAUSE();
        }
        return tmp;
#endif
    }

    value_type
    fetch_xor(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
#ifdef BOOST_ATOMIC_INTERLOCKED_XOR8
        platform_fence_before(order);
        v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_XOR8(&v_, v));
        platform_fence_after(order);
        return v;
#elif defined(BOOST_ATOMIC_INTERLOCKED_XOR)
        platform_fence_before(order);
        v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_XOR(&v_, v));
        platform_fence_after(order);
        return v;
#else
        value_type tmp = load(memory_order_relaxed);
        for (; !compare_exchange_weak(tmp, tmp ^ v, order, memory_order_relaxed);)
        {
            BOOST_ATOMIC_X86_PAUSE();
        }
        return tmp;
#endif
    }

    bool
    is_lock_free(void) const volatile BOOST_NOEXCEPT
    {
        return true;
    }

    BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS

    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))

private:
    storage_type v_;
};

#if defined(_MSC_VER)
#pragma warning(pop)
#endif

template<typename T, bool Sign>
class base_atomic<T, int, 2, Sign>
{
private:
    typedef base_atomic this_type;
    typedef T value_type;
#ifdef BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16
    typedef value_type storage_type;
#else
    typedef uint32_t storage_type;
#endif
    typedef T difference_type;

protected:
    typedef value_type value_arg_type;

public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
    BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}

    void
    store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        if (order != memory_order_seq_cst) {
            platform_fence_before(order);
            v_ = static_cast< storage_type >(v);
        } else {
            exchange(v, order);
        }
    }

    value_type
    load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
    {
        value_type v = static_cast< value_type >(v_);
        platform_fence_after_load(order);
        return v;
    }

    value_type
    fetch_add(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        platform_fence_before(order);
#ifdef BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16
        v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD16(&v_, v));
#else
        v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD(&v_, v));
#endif
        platform_fence_after(order);
        return v;
    }

    value_type
    fetch_sub(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        typedef typename make_signed< value_type >::type signed_value_type;
        return fetch_add(static_cast< value_type >(-static_cast< signed_value_type >(v)), order);
    }

    value_type
    exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        platform_fence_before(order);
#ifdef BOOST_ATOMIC_INTERLOCKED_EXCHANGE16
        v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16(&v_, v));
#else
        v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE(&v_, v));
#endif
        platform_fence_after(order);
        return v;
    }

    bool
    compare_exchange_strong(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        value_type previous = expected;
        platform_fence_before(success_order);
#ifdef BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16
        value_type oldval = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16(&v_, desired, previous));
#else
        value_type oldval = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE(&v_, desired, previous));
#endif
        bool success = (previous == oldval);
        if (success)
            platform_fence_after(success_order);
        else
            platform_fence_after(failure_order);
        expected = oldval;
        return success;
    }

    bool
    compare_exchange_weak(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        return compare_exchange_strong(expected, desired, success_order, failure_order);
    }

    value_type
    fetch_and(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
#ifdef BOOST_ATOMIC_INTERLOCKED_AND16
        platform_fence_before(order);
        v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_AND16(&v_, v));
        platform_fence_after(order);
        return v;
#elif defined(BOOST_ATOMIC_INTERLOCKED_AND)
        platform_fence_before(order);
        v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_AND(&v_, v));
        platform_fence_after(order);
        return v;
#else
        value_type tmp = load(memory_order_relaxed);
        for (; !compare_exchange_weak(tmp, tmp & v, order, memory_order_relaxed);)
        {
            BOOST_ATOMIC_X86_PAUSE();
        }
        return tmp;
#endif
    }

    value_type
    fetch_or(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
#ifdef BOOST_ATOMIC_INTERLOCKED_OR16
        platform_fence_before(order);
        v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_OR16(&v_, v));
        platform_fence_after(order);
        return v;
#elif defined(BOOST_ATOMIC_INTERLOCKED_OR)
        platform_fence_before(order);
        v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_OR(&v_, v));
        platform_fence_after(order);
        return v;
#else
        value_type tmp = load(memory_order_relaxed);
        for (; !compare_exchange_weak(tmp, tmp | v, order, memory_order_relaxed);)
        {
            BOOST_ATOMIC_X86_PAUSE();
        }
        return tmp;
#endif
    }

    value_type
    fetch_xor(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
#ifdef BOOST_ATOMIC_INTERLOCKED_XOR16
        platform_fence_before(order);
        v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_XOR16(&v_, v));
        platform_fence_after(order);
        return v;
#elif defined(BOOST_ATOMIC_INTERLOCKED_XOR)
        platform_fence_before(order);
        v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_XOR(&v_, v));
        platform_fence_after(order);
        return v;
#else
        value_type tmp = load(memory_order_relaxed);
        for (; !compare_exchange_weak(tmp, tmp ^ v, order, memory_order_relaxed);)
        {
            BOOST_ATOMIC_X86_PAUSE();
        }
        return tmp;
#endif
    }

    bool
    is_lock_free(void) const volatile BOOST_NOEXCEPT
    {
        return true;
    }

    BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS

    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))

private:
    storage_type v_;
};

template<typename T, bool Sign>
class base_atomic<T, int, 4, Sign>
{
private:
    typedef base_atomic this_type;
    typedef T value_type;
    typedef value_type storage_type;
    typedef T difference_type;

protected:
    typedef value_type value_arg_type;

public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
    BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}

    void
    store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        if (order != memory_order_seq_cst) {
            platform_fence_before(order);
            v_ = static_cast< storage_type >(v);
        } else {
            exchange(v, order);
        }
    }

    value_type
    load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
    {
        value_type v = static_cast< value_type >(v_);
        platform_fence_after_load(order);
        return v;
    }

    value_type
    fetch_add(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        platform_fence_before(order);
        v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD(&v_, v));
        platform_fence_after(order);
        return v;
    }

    value_type
    fetch_sub(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        typedef typename make_signed< value_type >::type signed_value_type;
        return fetch_add(static_cast< value_type >(-static_cast< signed_value_type >(v)), order);
    }

    value_type
    exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        platform_fence_before(order);
        v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE(&v_, v));
        platform_fence_after(order);
        return v;
    }

    bool
    compare_exchange_strong(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        value_type previous = expected;
        platform_fence_before(success_order);
        value_type oldval = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE(&v_, desired, previous));
        bool success = (previous == oldval);
        if (success)
            platform_fence_after(success_order);
        else
            platform_fence_after(failure_order);
        expected = oldval;
        return success;
    }

    bool
    compare_exchange_weak(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        return compare_exchange_strong(expected, desired, success_order, failure_order);
    }

    value_type
    fetch_and(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
#if defined(BOOST_ATOMIC_INTERLOCKED_AND)
        platform_fence_before(order);
        v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_AND(&v_, v));
        platform_fence_after(order);
        return v;
#else
        value_type tmp = load(memory_order_relaxed);
        for (; !compare_exchange_weak(tmp, tmp & v, order, memory_order_relaxed);)
        {
            BOOST_ATOMIC_X86_PAUSE();
        }
        return tmp;
#endif
    }

    value_type
    fetch_or(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
#if defined(BOOST_ATOMIC_INTERLOCKED_OR)
        platform_fence_before(order);
        v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_OR(&v_, v));
        platform_fence_after(order);
        return v;
#else
        value_type tmp = load(memory_order_relaxed);
        for (; !compare_exchange_weak(tmp, tmp | v, order, memory_order_relaxed);)
        {
            BOOST_ATOMIC_X86_PAUSE();
        }
        return tmp;
#endif
    }

    value_type
    fetch_xor(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
#if defined(BOOST_ATOMIC_INTERLOCKED_XOR)
        platform_fence_before(order);
        v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_XOR(&v_, v));
        platform_fence_after(order);
        return v;
#else
        value_type tmp = load(memory_order_relaxed);
        for (; !compare_exchange_weak(tmp, tmp ^ v, order, memory_order_relaxed);)
        {
            BOOST_ATOMIC_X86_PAUSE();
        }
        return tmp;
#endif
    }

    bool
    is_lock_free(void) const volatile BOOST_NOEXCEPT
    {
        return true;
    }

    BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS

    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))

private:
    storage_type v_;
};

// MSVC 2012 fails to recognize sizeof(T) as a constant expression in template specializations
enum msvc_sizeof_pointer_workaround { sizeof_pointer = sizeof(void*) };

template<bool Sign>
class base_atomic<void*, void*, sizeof_pointer, Sign>
{
private:
    typedef base_atomic this_type;
    typedef std::ptrdiff_t difference_type;
    typedef void* value_type;

protected:
    typedef value_type value_arg_type;

public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
    BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}

    void
    store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        if (order != memory_order_seq_cst) {
            platform_fence_before(order);
            const_cast<volatile value_type &>(v_) = v;
        } else {
            exchange(v, order);
        }
    }

    value_type
    load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
    {
        value_type v = const_cast<const volatile value_type &>(v_);
        platform_fence_after_load(order);
        return v;
    }

    value_type
    exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        platform_fence_before(order);
        v = (value_type)BOOST_ATOMIC_INTERLOCKED_EXCHANGE_POINTER(&v_, v);
        platform_fence_after(order);
        return v;
    }

    bool
    compare_exchange_strong(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        value_type previous = expected;
        platform_fence_before(success_order);
        value_type oldval = (value_type)BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_POINTER(&v_, desired, previous);
        bool success = (previous == oldval);
        if (success)
            platform_fence_after(success_order);
        else
            platform_fence_after(failure_order);
        expected = oldval;
        return success;
    }

    bool
    compare_exchange_weak(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        return compare_exchange_strong(expected, desired, success_order, failure_order);
    }

    bool
    is_lock_free(void) const volatile BOOST_NOEXCEPT
    {
        return true;
    }

    value_type
    fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        platform_fence_before(order);
        value_type res = (value_type)BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_POINTER(&v_, v);
        platform_fence_after(order);
        return res;
    }

    value_type
    fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return fetch_add(-v, order);
    }

    BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS

    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))

private:
    value_type v_;
};

template<typename T, bool Sign>
class base_atomic<T*, void*, sizeof_pointer, Sign>
{
private:
    typedef base_atomic this_type;
    typedef T* value_type;
    typedef std::ptrdiff_t difference_type;

protected:
    typedef value_type value_arg_type;

public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
    BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}

    void
    store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        if (order != memory_order_seq_cst) {
            platform_fence_before(order);
            const_cast<volatile value_type &>(v_) = v;
        } else {
            exchange(v, order);
        }
    }

    value_type
    load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
    {
        value_type v = const_cast<const volatile value_type &>(v_);
        platform_fence_after_load(order);
        return v;
    }

    value_type
    exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        platform_fence_before(order);
        v = (value_type)BOOST_ATOMIC_INTERLOCKED_EXCHANGE_POINTER(&v_, v);
        platform_fence_after(order);
        return v;
    }

    bool
    compare_exchange_strong(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        value_type previous = expected;
        platform_fence_before(success_order);
        value_type oldval = (value_type)BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_POINTER(&v_, desired, previous);
        bool success = (previous == oldval);
        if (success)
            platform_fence_after(success_order);
        else
            platform_fence_after(failure_order);
        expected = oldval;
        return success;
    }

    bool
    compare_exchange_weak(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        return compare_exchange_strong(expected, desired, success_order, failure_order);
    }

    value_type
    fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        v = v * sizeof(*v_);
        platform_fence_before(order);
        value_type res = (value_type)BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_POINTER(&v_, v);
        platform_fence_after(order);
        return res;
    }

    value_type
    fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        return fetch_add(-v, order);
    }

    bool
    is_lock_free(void) const volatile BOOST_NOEXCEPT
    {
        return true;
    }

    BOOST_ATOMIC_DECLARE_POINTER_OPERATORS

    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))

private:
    value_type v_;
};
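
// Illustrative note: unlike the void* specialization, the T* specialization
// above scales fetch_add/fetch_sub arguments by sizeof(T) before the
// interlocked add, matching ordinary pointer arithmetic. E.g. for a
// hypothetical boost::atomic< int* > p, p.fetch_add(1) advances the stored
// pointer by sizeof(int) bytes.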

template<typename T, bool Sign>
class base_atomic<T, void, 1, Sign>
{
private:
    typedef base_atomic this_type;
    typedef T value_type;
#ifdef BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8
    typedef uint8_t storage_type;
#else
    typedef uint32_t storage_type;
#endif

protected:
    typedef value_type const& value_arg_type;

public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
#ifdef BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8
    BOOST_CONSTEXPR explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : v_(reinterpret_cast< storage_type const& >(v))
    {
    }
#else
    explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : v_(0)
    {
        memcpy(&v_, &v, sizeof(value_type));
    }
#endif

    void
    store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        if (order != memory_order_seq_cst) {
            storage_type tmp = 0;
            memcpy(&tmp, &v, sizeof(value_type));
            platform_fence_before(order);
            const_cast<volatile storage_type &>(v_) = tmp;
        } else {
            exchange(v, order);
        }
    }

    value_type
    load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
    {
        storage_type tmp = const_cast<volatile storage_type &>(v_);
        platform_fence_after_load(order);
        value_type v;
        memcpy(&v, &tmp, sizeof(value_type));
        return v;
    }

    value_type
    exchange(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        storage_type tmp = 0;
        memcpy(&tmp, &v, sizeof(value_type));
        platform_fence_before(order);
#ifdef BOOST_ATOMIC_INTERLOCKED_EXCHANGE8
        tmp = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE8(&v_, tmp));
#else
        tmp = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE(&v_, tmp));
#endif
        platform_fence_after(order);
        value_type res;
        memcpy(&res, &tmp, sizeof(value_type));
        return res;
    }

    bool
    compare_exchange_strong(
        value_type & expected,
        value_type const& desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        storage_type expected_s = 0, desired_s = 0;
        memcpy(&expected_s, &expected, sizeof(value_type));
        memcpy(&desired_s, &desired, sizeof(value_type));
        platform_fence_before(success_order);
#ifdef BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8
        storage_type oldval = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE8(&v_, desired_s, expected_s));
#else
        storage_type oldval = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE(&v_, desired_s, expected_s));
#endif
        bool success = (oldval == expected_s);
        if (success)
            platform_fence_after(success_order);
        else
            platform_fence_after(failure_order);
        memcpy(&expected, &oldval, sizeof(value_type));
        return success;
    }

    bool
    compare_exchange_weak(
        value_type & expected,
        value_type const& desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        return compare_exchange_strong(expected, desired, success_order, failure_order);
    }

    bool
    is_lock_free(void) const volatile BOOST_NOEXCEPT
    {
        return true;
    }

    BOOST_ATOMIC_DECLARE_BASE_OPERATORS

    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))

private:
    storage_type v_;
};

template<typename T, bool Sign>
class base_atomic<T, void, 2, Sign>
{
private:
    typedef base_atomic this_type;
    typedef T value_type;
#ifdef BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16
    typedef uint16_t storage_type;
#else
    typedef uint32_t storage_type;
#endif

protected:
    typedef value_type const& value_arg_type;

public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
#ifdef BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16
    BOOST_CONSTEXPR explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : v_(reinterpret_cast< storage_type const& >(v))
    {
    }
#else
    explicit base_atomic(value_type const& v) BOOST_NOEXCEPT : v_(0)
    {
        memcpy(&v_, &v, sizeof(value_type));
    }
#endif

    void
    store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        if (order != memory_order_seq_cst) {
            storage_type tmp = 0;
            memcpy(&tmp, &v, sizeof(value_type));
            platform_fence_before(order);
            const_cast<volatile storage_type &>(v_) = tmp;
        } else {
            exchange(v, order);
        }
    }

    value_type
    load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
    {
        storage_type tmp = const_cast<volatile storage_type &>(v_);
        platform_fence_after_load(order);
        value_type v;
        memcpy(&v, &tmp, sizeof(value_type));
        return v;
    }

    value_type
    exchange(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        storage_type tmp = 0;
        memcpy(&tmp, &v, sizeof(value_type));
        platform_fence_before(order);
#ifdef BOOST_ATOMIC_INTERLOCKED_EXCHANGE16
        tmp = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE16(&v_, tmp));
#else
        tmp = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE(&v_, tmp));
#endif
        platform_fence_after(order);
        value_type res;
        memcpy(&res, &tmp, sizeof(value_type));
        return res;
    }

    bool
    compare_exchange_strong(
        value_type & expected,
        value_type const& desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        storage_type expected_s = 0, desired_s = 0;
        memcpy(&expected_s, &expected, sizeof(value_type));
        memcpy(&desired_s, &desired, sizeof(value_type));
        platform_fence_before(success_order);
#ifdef BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16
        storage_type oldval = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE16(&v_, desired_s, expected_s));
#else
        storage_type oldval = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE(&v_, desired_s, expected_s));
#endif
        bool success = (oldval == expected_s);
        if (success)
            platform_fence_after(success_order);
        else
            platform_fence_after(failure_order);
        memcpy(&expected, &oldval, sizeof(value_type));
        return success;
    }

    bool
    compare_exchange_weak(
        value_type & expected,
        value_type const& desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        return compare_exchange_strong(expected, desired, success_order, failure_order);
    }

    bool
    is_lock_free(void) const volatile BOOST_NOEXCEPT
    {
        return true;
    }

    BOOST_ATOMIC_DECLARE_BASE_OPERATORS

    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))

private:
    storage_type v_;
};

template<typename T, bool Sign>
class base_atomic<T, void, 4, Sign>
{
private:
    typedef base_atomic this_type;
    typedef T value_type;
    typedef uint32_t storage_type;

protected:
    typedef value_type const& value_arg_type;

public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
    explicit base_atomic(value_type const& v) : v_(0)
    {
        memcpy(&v_, &v, sizeof(value_type));
    }

    void
    store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        if (order != memory_order_seq_cst) {
            storage_type tmp = 0;
            memcpy(&tmp, &v, sizeof(value_type));
            platform_fence_before(order);
            const_cast<volatile storage_type &>(v_) = tmp;
        } else {
            exchange(v, order);
        }
    }

    value_type
    load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
    {
        storage_type tmp = const_cast<volatile storage_type &>(v_);
        platform_fence_after_load(order);
        value_type v;
        memcpy(&v, &tmp, sizeof(value_type));
        return v;
    }

    value_type
    exchange(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        storage_type tmp = 0;
        memcpy(&tmp, &v, sizeof(value_type));
        platform_fence_before(order);
        tmp = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE(&v_, tmp));
        platform_fence_after(order);
        value_type res;
        memcpy(&res, &tmp, sizeof(value_type));
        return res;
    }

    bool
    compare_exchange_strong(
        value_type & expected,
        value_type const& desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        storage_type expected_s = 0, desired_s = 0;
        memcpy(&expected_s, &expected, sizeof(value_type));
        memcpy(&desired_s, &desired, sizeof(value_type));
        platform_fence_before(success_order);
        storage_type oldval = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE(&v_, desired_s, expected_s));
        bool success = (oldval == expected_s);
        if (success)
            platform_fence_after(success_order);
        else
            platform_fence_after(failure_order);
        memcpy(&expected, &oldval, sizeof(value_type));
        return success;
    }

    bool
    compare_exchange_weak(
        value_type & expected,
        value_type const& desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        return compare_exchange_strong(expected, desired, success_order, failure_order);
    }

    bool
    is_lock_free(void) const volatile BOOST_NOEXCEPT
    {
        return true;
    }

    BOOST_ATOMIC_DECLARE_BASE_OPERATORS

    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))

private:
    storage_type v_;
};

#if defined(_M_AMD64) || defined(_M_IA64)

template<typename T, bool Sign>
class base_atomic<T, int, 8, Sign>
{
private:
    typedef base_atomic this_type;
    typedef T value_type;
    typedef value_type storage_type;
    typedef T difference_type;

protected:
    typedef value_type value_arg_type;

public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
    BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {}

    void
    store(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        if (order != memory_order_seq_cst) {
            platform_fence_before(order);
            v_ = static_cast< storage_type >(v);
        } else {
            exchange(v, order);
        }
    }

    value_type
    load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
    {
        value_type v = static_cast< value_type >(v_);
        platform_fence_after_load(order);
        return v;
    }

    value_type
    fetch_add(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        platform_fence_before(order);
        v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64(&v_, v));
        platform_fence_after(order);
        return v;
    }

    value_type
    fetch_sub(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        typedef typename make_signed< value_type >::type signed_value_type;
        return fetch_add(static_cast< value_type >(-static_cast< signed_value_type >(v)), order);
    }

    value_type
    exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        platform_fence_before(order);
        v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64(&v_, v));
        platform_fence_after(order);
        return v;
    }

    bool
    compare_exchange_strong(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        value_type previous = expected;
        platform_fence_before(success_order);
        value_type oldval = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64(&v_, desired, previous));
        bool success = (previous == oldval);
        if (success)
            platform_fence_after(success_order);
        else
            platform_fence_after(failure_order);
        expected = oldval;
        return success;
    }

    bool
    compare_exchange_weak(
        value_type & expected,
        value_type desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        return compare_exchange_strong(expected, desired, success_order, failure_order);
    }

    value_type
    fetch_and(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
#if defined(BOOST_ATOMIC_INTERLOCKED_AND64)
        platform_fence_before(order);
        v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_AND64(&v_, v));
        platform_fence_after(order);
        return v;
#else
        value_type tmp = load(memory_order_relaxed);
        for (; !compare_exchange_weak(tmp, tmp & v, order, memory_order_relaxed);)
        {
            BOOST_ATOMIC_X86_PAUSE();
        }
        return tmp;
#endif
    }

    value_type
    fetch_or(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
#if defined(BOOST_ATOMIC_INTERLOCKED_OR64)
        platform_fence_before(order);
        v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_OR64(&v_, v));
        platform_fence_after(order);
        return v;
#else
        value_type tmp = load(memory_order_relaxed);
        for (; !compare_exchange_weak(tmp, tmp | v, order, memory_order_relaxed);)
        {
            BOOST_ATOMIC_X86_PAUSE();
        }
        return tmp;
#endif
    }

    value_type
    fetch_xor(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
#if defined(BOOST_ATOMIC_INTERLOCKED_XOR64)
        platform_fence_before(order);
        v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_XOR64(&v_, v));
        platform_fence_after(order);
        return v;
#else
        value_type tmp = load(memory_order_relaxed);
        for (; !compare_exchange_weak(tmp, tmp ^ v, order, memory_order_relaxed);)
        {
            BOOST_ATOMIC_X86_PAUSE();
        }
        return tmp;
#endif
    }

    bool
    is_lock_free(void) const volatile BOOST_NOEXCEPT
    {
        return true;
    }

    BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS

    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))

private:
    storage_type v_;
};

template<typename T, bool Sign>
class base_atomic<T, void, 8, Sign>
{
private:
    typedef base_atomic this_type;
    typedef T value_type;
    typedef uint64_t storage_type;

protected:
    typedef value_type const& value_arg_type;

public:
    BOOST_DEFAULTED_FUNCTION(base_atomic(void), {})
    explicit base_atomic(value_type const& v) : v_(0)
    {
        memcpy(&v_, &v, sizeof(value_type));
    }

    void
    store(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        if (order != memory_order_seq_cst) {
            storage_type tmp = 0;
            memcpy(&tmp, &v, sizeof(value_type));
            platform_fence_before(order);
            const_cast<volatile storage_type &>(v_) = tmp;
        } else {
            exchange(v, order);
        }
    }

    value_type
    load(memory_order order = memory_order_seq_cst) const volatile BOOST_NOEXCEPT
    {
        storage_type tmp = const_cast<volatile storage_type &>(v_);
        platform_fence_after_load(order);
        value_type v;
        memcpy(&v, &tmp, sizeof(value_type));
        return v;
    }

    value_type
    exchange(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
    {
        storage_type tmp = 0;
        memcpy(&tmp, &v, sizeof(value_type));
        platform_fence_before(order);
        tmp = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64(&v_, tmp));
        platform_fence_after(order);
        value_type res;
        memcpy(&res, &tmp, sizeof(value_type));
        return res;
    }

    bool
    compare_exchange_strong(
        value_type & expected,
        value_type const& desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        storage_type expected_s = 0, desired_s = 0;
        memcpy(&expected_s, &expected, sizeof(value_type));
        memcpy(&desired_s, &desired, sizeof(value_type));
        platform_fence_before(success_order);
        storage_type oldval = static_cast< storage_type >(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64(&v_, desired_s, expected_s));
        bool success = (oldval == expected_s);
        if (success)
            platform_fence_after(success_order);
        else
            platform_fence_after(failure_order);
        memcpy(&expected, &oldval, sizeof(value_type));
        return success;
    }

    bool
    compare_exchange_weak(
        value_type & expected,
        value_type const& desired,
        memory_order success_order,
        memory_order failure_order) volatile BOOST_NOEXCEPT
    {
        return compare_exchange_strong(expected, desired, success_order, failure_order);
    }

    bool
    is_lock_free(void) const volatile BOOST_NOEXCEPT
    {
        return true;
    }

    BOOST_ATOMIC_DECLARE_BASE_OPERATORS

    BOOST_DELETED_FUNCTION(base_atomic(base_atomic const&))
    BOOST_DELETED_FUNCTION(base_atomic& operator=(base_atomic const&))

private:
    storage_type v_;
};

#elif defined(BOOST_ATOMIC_X86_HAS_CMPXCHG8B)

template<typename T>
inline bool
platform_cmpxchg64_strong(T & expected, T desired, volatile T * p) BOOST_NOEXCEPT
{
#if defined(BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64)
    const T oldval = BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64(p, desired, expected);
    const bool result = (oldval == expected);
    expected = oldval;
    return result;
#else
    bool result;
    __asm
    {
        mov edi, p
        mov esi, expected
        mov ebx, dword ptr [desired]
        mov ecx, dword ptr [desired + 4]
        mov eax, dword ptr [esi]
        mov edx, dword ptr [esi + 4]
        lock cmpxchg8b qword ptr [edi]
        mov dword ptr [esi], eax
        mov dword ptr [esi + 4], edx
        sete result
    };
    return result;
#endif
}
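
// Illustrative note: platform_cmpxchg64_strong, together with
// platform_load64/platform_store64 below, forms the primitive set from which
// cas64strong.hpp (included at the end of this header) builds the remaining
// 64-bit atomic operations on 32-bit x86 by looping on the CAS.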

// Intel 64 and IA-32 Architectures Software Developer's Manual, Volume 3A, 8.1.1. Guaranteed Atomic Operations:
//
// The Pentium processor (and newer processors since) guarantees that the following additional memory operations will always be carried out atomically:
// * Reading or writing a quadword aligned on a 64-bit boundary
//
// Luckily, the memory is almost always 8-byte aligned in our case because atomic<> uses 64-bit native types for storage and dynamic memory allocations
// have at least 8-byte alignment. The only unfortunate case is when an atomic<> is placed on the stack and is not 8-byte aligned (as can happen on 32-bit Windows).

template<typename T>
inline void
platform_store64(T value, volatile T * p) BOOST_NOEXCEPT
{
    if (((uint32_t)p & 0x00000007) == 0)
    {
#if defined(_M_IX86_FP) && _M_IX86_FP >= 2
        __asm
        {
            mov edx, p
            movq xmm4, value
            movq qword ptr [edx], xmm4
        };
#else
        __asm
        {
            mov edx, p
            fild value
            fistp qword ptr [edx]
        };
#endif
    }
    else
    {
        __asm
        {
            mov edi, p
            mov ebx, dword ptr [value]
            mov ecx, dword ptr [value + 4]
            mov eax, dword ptr [edi]
            mov edx, dword ptr [edi + 4]
            align 16
        again:
            lock cmpxchg8b qword ptr [edi]
            jne again
        };
    }
}
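
// Illustrative note on the unaligned path above: cmpxchg8b retries until the
// value read into eax:edx matches the quadword in memory, at which point the
// new value in ebx:ecx is written atomically; on each failure, cmpxchg8b
// reloads eax:edx with the current contents, so the loop makes progress.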

template<typename T>
inline T
platform_load64(const volatile T * p) BOOST_NOEXCEPT
{
    T value;

    if (((uint32_t)p & 0x00000007) == 0)
    {
#if defined(_M_IX86_FP) && _M_IX86_FP >= 2
        __asm
        {
            mov edx, p
            movq xmm4, qword ptr [edx]
            movq value, xmm4
        };
#else
        __asm
        {
            mov edx, p
            fild qword ptr [edx]
            fistp value
        };
#endif
    }
    else
    {
        // We don't care about the comparison result here; the previous value will be stored into value anyway.
        // Also we don't care about the ebx and ecx values, they just have to be equal to eax and edx before cmpxchg8b.
        __asm
        {
            mov edi, p
            mov eax, ebx
            mov edx, ecx
            lock cmpxchg8b qword ptr [edi]
            mov dword ptr [value], eax
            mov dword ptr [value + 4], edx
        };
    }

    return value;
}

#endif

} // namespace detail
} // namespace atomics
} // namespace boost

/* pull in 64-bit atomic type using cmpxchg8b above */
#if defined(BOOST_ATOMIC_X86_HAS_CMPXCHG8B)
#include <boost/atomic/detail/cas64strong.hpp>
#endif

#endif /* !defined(BOOST_ATOMIC_FORCE_FALLBACK) */

#ifdef _MSC_VER
#pragma warning(pop)
#endif

#endif