freelist.hpp

// lock-free freelist
//
// Copyright (C) 2008-2013 Tim Blechmann
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)

#ifndef BOOST_LOCKFREE_FREELIST_HPP_INCLUDED
#define BOOST_LOCKFREE_FREELIST_HPP_INCLUDED

#include <limits>
#include <memory>
#include <stdexcept>                    // std::runtime_error, thrown below

#include <boost/array.hpp>
#include <boost/config.hpp>
#include <boost/cstdint.hpp>
#include <boost/noncopyable.hpp>
#include <boost/static_assert.hpp>
#include <boost/throw_exception.hpp>    // boost::throw_exception, used below

#include <boost/lockfree/detail/atomic.hpp>
#include <boost/lockfree/detail/parameter.hpp>
#include <boost/lockfree/detail/tagged_ptr.hpp>

#if defined(_MSC_VER)
#pragma warning(push)
#pragma warning(disable: 4100) // unreferenced formal parameter
#pragma warning(disable: 4127) // conditional expression is constant
#endif

namespace boost {
namespace lockfree {
namespace detail {
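
// freelist_stack keeps recycled nodes on a lock-free stack (a Treiber-style
// stack): nodes are pushed and popped with compare_exchange_weak on a tagged
// pointer, and the tag is advanced on every pop to defend against the ABA
// problem. When the pool is empty and the freelist is not Bounded, fresh
// nodes come from the allocator.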
template <typename T,
          typename Alloc = std::allocator<T>
         >
class freelist_stack:
    Alloc
{
    struct freelist_node
    {
        tagged_ptr<freelist_node> next;
    };

    typedef tagged_ptr<freelist_node> tagged_node_ptr;

public:
    typedef tagged_ptr<T> tagged_node_handle;

    template <typename Allocator>
    freelist_stack (Allocator const & alloc, std::size_t n = 0):
        Alloc(alloc),
        pool_(tagged_node_ptr(NULL))
    {
        for (std::size_t i = 0; i != n; ++i) {
            T * node = Alloc::allocate(1);
#ifdef BOOST_LOCKFREE_FREELIST_INIT_RUNS_DTOR
            destruct<false>(node);
#else
            deallocate<false>(node);
#endif
        }
    }

    template <bool ThreadSafe>
    void reserve (std::size_t count)
    {
        for (std::size_t i = 0; i != count; ++i) {
            T * node = Alloc::allocate(1);
            deallocate<ThreadSafe>(node);
        }
    }

    template <bool ThreadSafe, bool Bounded>
    T * construct (void)
    {
        T * node = allocate<ThreadSafe, Bounded>();
        if (node)
            new(node) T();
        return node;
    }

    template <bool ThreadSafe, bool Bounded, typename ArgumentType>
    T * construct (ArgumentType const & arg)
    {
        T * node = allocate<ThreadSafe, Bounded>();
        if (node)
            new(node) T(arg);
        return node;
    }

    template <bool ThreadSafe, bool Bounded, typename ArgumentType1, typename ArgumentType2>
    T * construct (ArgumentType1 const & arg1, ArgumentType2 const & arg2)
    {
        T * node = allocate<ThreadSafe, Bounded>();
        if (node)
            new(node) T(arg1, arg2);
        return node;
    }

    template <bool ThreadSafe>
    void destruct (tagged_node_handle tagged_ptr)
    {
        T * n = tagged_ptr.get_ptr();
        n->~T();
        deallocate<ThreadSafe>(n);
    }

    template <bool ThreadSafe>
    void destruct (T * n)
    {
        n->~T();
        deallocate<ThreadSafe>(n);
    }

    ~freelist_stack(void)
    {
        tagged_node_ptr current = pool_.load();

        while (current) {
            freelist_node * current_ptr = current.get_ptr();
            if (current_ptr)
                current = current_ptr->next;
            Alloc::deallocate((T*)current_ptr, 1);
        }
    }

    bool is_lock_free(void) const
    {
        return pool_.is_lock_free();
    }

    T * get_handle(T * pointer) const
    {
        return pointer;
    }

    T * get_handle(tagged_node_handle const & handle) const
    {
        return get_pointer(handle);
    }

    T * get_pointer(tagged_node_handle const & tptr) const
    {
        return tptr.get_ptr();
    }

    T * get_pointer(T * pointer) const
    {
        return pointer;
    }

    T * null_handle(void) const
    {
        return NULL;
    }

protected: // allow use from subclasses
    template <bool ThreadSafe, bool Bounded>
    T * allocate (void)
    {
        if (ThreadSafe)
            return allocate_impl<Bounded>();
        else
            return allocate_impl_unsafe<Bounded>();
    }

private:
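    // Thread-safe pop: repeatedly try to swing pool_ from the current head
    // to head->next with an incremented tag. A Bounded freelist returns 0
    // when the pool is empty; an unbounded one falls back to the allocator.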
    template <bool Bounded>
    T * allocate_impl (void)
    {
        tagged_node_ptr old_pool = pool_.load(memory_order_consume);

        for(;;) {
            if (!old_pool.get_ptr()) {
                if (!Bounded)
                    return Alloc::allocate(1);
                else
                    return 0;
            }

            freelist_node * new_pool_ptr = old_pool->next.get_ptr();
            tagged_node_ptr new_pool (new_pool_ptr, old_pool.get_next_tag());

            if (pool_.compare_exchange_weak(old_pool, new_pool)) {
                void * ptr = old_pool.get_ptr();
                return reinterpret_cast<T*>(ptr);
            }
        }
    }
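
    // Single-threaded pop: the same logic without the CAS loop. Relaxed
    // memory ordering suffices here because the caller guarantees that no
    // other thread accesses the pool concurrently.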
    template <bool Bounded>
    T * allocate_impl_unsafe (void)
    {
        tagged_node_ptr old_pool = pool_.load(memory_order_relaxed);

        if (!old_pool.get_ptr()) {
            if (!Bounded)
                return Alloc::allocate(1);
            else
                return 0;
        }

        freelist_node * new_pool_ptr = old_pool->next.get_ptr();
        tagged_node_ptr new_pool (new_pool_ptr, old_pool.get_next_tag());

        pool_.store(new_pool, memory_order_relaxed);
        void * ptr = old_pool.get_ptr();
        return reinterpret_cast<T*>(ptr);
    }

protected:
    template <bool ThreadSafe>
    void deallocate (T * n)
    {
        if (ThreadSafe)
            deallocate_impl(n);
        else
            deallocate_impl_unsafe(n);
    }

private:
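    // Thread-safe push: reinterpret the node's storage as a freelist_node,
    // point it at the current head, and CAS it in as the new head. The tag
    // is kept unchanged; only pops advance it.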
    void deallocate_impl (T * n)
    {
        void * node = n;
        tagged_node_ptr old_pool = pool_.load(memory_order_consume);
        freelist_node * new_pool_ptr = reinterpret_cast<freelist_node*>(node);

        for(;;) {
            tagged_node_ptr new_pool (new_pool_ptr, old_pool.get_tag());
            new_pool->next.set_ptr(old_pool.get_ptr());

            if (pool_.compare_exchange_weak(old_pool, new_pool))
                return;
        }
    }

    void deallocate_impl_unsafe (T * n)
    {
        void * node = n;
        tagged_node_ptr old_pool = pool_.load(memory_order_relaxed);
        freelist_node * new_pool_ptr = reinterpret_cast<freelist_node*>(node);

        tagged_node_ptr new_pool (new_pool_ptr, old_pool.get_tag());
        new_pool->next.set_ptr(old_pool.get_ptr());

        pool_.store(new_pool, memory_order_relaxed);
    }

    atomic<tagged_node_ptr> pool_;
};
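
// tagged_index pairs a 16-bit array index with a 16-bit ABA tag, so the
// combined value typically fits in 32 bits and atomic<tagged_index> can be
// lock-free on platforms without a double-width compare-exchange.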
class tagged_index
{
public:
    typedef boost::uint16_t tag_t;
    typedef boost::uint16_t index_t;

    /** uninitialized constructor */
    tagged_index(void) BOOST_NOEXCEPT //: index(0), tag(0)
    {}

    /** copy constructor */
#ifdef BOOST_NO_CXX11_DEFAULTED_FUNCTIONS
    tagged_index(tagged_index const & rhs):
        index(rhs.index), tag(rhs.tag)
    {}
#else
    tagged_index(tagged_index const & rhs) = default;
#endif

    explicit tagged_index(index_t i, tag_t t = 0):
        index(i), tag(t)
    {}

    /** index access */
    /* @{ */
    index_t get_index() const
    {
        return index;
    }

    void set_index(index_t i)
    {
        index = i;
    }
    /* @} */

    /** tag access */
    /* @{ */
    tag_t get_tag() const
    {
        return tag;
    }

    tag_t get_next_tag() const
    {
        tag_t next = (get_tag() + 1) & (std::numeric_limits<tag_t>::max)();
        return next;
    }

    void set_tag(tag_t t)
    {
        tag = t;
    }
    /* @} */

    bool operator==(tagged_index const & rhs) const
    {
        return (index == rhs.index) && (tag == rhs.tag);
    }

    bool operator!=(tagged_index const & rhs) const
    {
        return !operator==(rhs);
    }

protected:
    index_t index;
    tag_t tag;
};
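
// Compile-time sized storage policy: node memory lives in an embedded
// boost::array, so no allocation happens at runtime.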
template <typename T,
          std::size_t size>
struct compiletime_sized_freelist_storage
{
    // array-based freelists only support a 16bit address space.
    BOOST_STATIC_ASSERT(size < 65536);

    boost::array<char, size * sizeof(T)> data;

    // unused ... only for API purposes
    template <typename Allocator>
    compiletime_sized_freelist_storage(Allocator const & /* alloc */, std::size_t /* count */)
    {}

    T * nodes(void) const
    {
        return reinterpret_cast<T*>(const_cast<char*>(data.data()));
    }

    std::size_t node_count(void) const
    {
        return size;
    }
};
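
// Runtime-sized storage policy: node memory is obtained from the allocator
// in the constructor and released in the destructor. Both policies expose
// nodes() and node_count().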
template <typename T,
          typename Alloc = std::allocator<T> >
struct runtime_sized_freelist_storage:
    Alloc
{
    T * nodes_;
    std::size_t node_count_;

    template <typename Allocator>
    runtime_sized_freelist_storage(Allocator const & alloc, std::size_t count):
        Alloc(alloc), node_count_(count)
    {
        if (count > 65535)
            boost::throw_exception(std::runtime_error("boost.lockfree: freelist size is limited to a maximum of 65535 objects"));
        nodes_ = Alloc::allocate(count);
    }

    ~runtime_sized_freelist_storage(void)
    {
        Alloc::deallocate(nodes_, node_count_);
    }

    T * nodes(void) const
    {
        return nodes_;
    }

    std::size_t node_count(void) const
    {
        return node_count_;
    }
};
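
// fixed_size_freelist implements the same stack discipline as
// freelist_stack, but links free nodes by 16-bit indices into a fixed array
// instead of by pointer. The one-past-the-end index, node_count(), serves
// as the null/sentinel handle.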
template <typename T,
          typename NodeStorage = runtime_sized_freelist_storage<T>
         >
class fixed_size_freelist:
    NodeStorage
{
    struct freelist_node
    {
        tagged_index next;
    };

    typedef tagged_index::index_t index_t;

    void initialize(void)
    {
        T * nodes = NodeStorage::nodes();
        for (std::size_t i = 0; i != NodeStorage::node_count(); ++i) {
            tagged_index * next_index = reinterpret_cast<tagged_index*>(nodes + i);
            next_index->set_index(null_handle());

#ifdef BOOST_LOCKFREE_FREELIST_INIT_RUNS_DTOR
            destruct<false>(nodes + i);
#else
            deallocate<false>(static_cast<index_t>(i));
#endif
        }
    }

public:
    typedef tagged_index tagged_node_handle;

    template <typename Allocator>
    fixed_size_freelist (Allocator const & alloc, std::size_t count):
        NodeStorage(alloc, count),
        pool_(tagged_index(static_cast<index_t>(count), 0))
    {
        initialize();
    }

    fixed_size_freelist (void):
        pool_(tagged_index(NodeStorage::node_count(), 0))
    {
        initialize();
    }

    template <bool ThreadSafe, bool Bounded>
    T * construct (void)
    {
        index_t node_index = allocate<ThreadSafe>();
        if (node_index == null_handle())
            return NULL;

        T * node = NodeStorage::nodes() + node_index;
        new(node) T();
        return node;
    }

    template <bool ThreadSafe, bool Bounded, typename ArgumentType>
    T * construct (ArgumentType const & arg)
    {
        index_t node_index = allocate<ThreadSafe>();
        if (node_index == null_handle())
            return NULL;

        T * node = NodeStorage::nodes() + node_index;
        new(node) T(arg);
        return node;
    }

    template <bool ThreadSafe, bool Bounded, typename ArgumentType1, typename ArgumentType2>
    T * construct (ArgumentType1 const & arg1, ArgumentType2 const & arg2)
    {
        index_t node_index = allocate<ThreadSafe>();
        if (node_index == null_handle())
            return NULL;

        T * node = NodeStorage::nodes() + node_index;
        new(node) T(arg1, arg2);
        return node;
    }

    template <bool ThreadSafe>
    void destruct (tagged_node_handle tagged_index)
    {
        index_t index = tagged_index.get_index();
        T * n = NodeStorage::nodes() + index;
        n->~T();
        deallocate<ThreadSafe>(index);
    }

    template <bool ThreadSafe>
    void destruct (T * n)
    {
        n->~T();
        deallocate<ThreadSafe>(n - NodeStorage::nodes());
    }

    bool is_lock_free(void) const
    {
        return pool_.is_lock_free();
    }

    index_t null_handle(void) const
    {
        return static_cast<index_t>(NodeStorage::node_count());
    }

    index_t get_handle(T * pointer) const
    {
        if (pointer == NULL)
            return null_handle();
        else
            return static_cast<index_t>(pointer - NodeStorage::nodes());
    }

    index_t get_handle(tagged_node_handle const & handle) const
    {
        return handle.get_index();
    }

    T * get_pointer(tagged_node_handle const & tptr) const
    {
        return get_pointer(tptr.get_index());
    }

    T * get_pointer(index_t index) const
    {
        if (index == null_handle())
            return 0;
        else
            return NodeStorage::nodes() + index;
    }

    T * get_pointer(T * ptr) const
    {
        return ptr;
    }

protected: // allow use from subclasses
    template <bool ThreadSafe>
    index_t allocate (void)
    {
        if (ThreadSafe)
            return allocate_impl();
        else
            return allocate_impl_unsafe();
    }

private:
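    // Index-based variants of the CAS pop/push loops used by freelist_stack;
    // the next-index of a free node is kept in the first bytes of the node's
    // own (unconstructed) storage.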
    index_t allocate_impl (void)
    {
        tagged_index old_pool = pool_.load(memory_order_consume);

        for(;;) {
            index_t index = old_pool.get_index();
            if (index == null_handle())
                return index;

            T * old_node = NodeStorage::nodes() + index;
            tagged_index * next_index = reinterpret_cast<tagged_index*>(old_node);

            tagged_index new_pool(next_index->get_index(), old_pool.get_next_tag());

            if (pool_.compare_exchange_weak(old_pool, new_pool))
                return old_pool.get_index();
        }
    }

    index_t allocate_impl_unsafe (void)
    {
        tagged_index old_pool = pool_.load(memory_order_consume);

        index_t index = old_pool.get_index();
        if (index == null_handle())
            return index;

        T * old_node = NodeStorage::nodes() + index;
        tagged_index * next_index = reinterpret_cast<tagged_index*>(old_node);

        tagged_index new_pool(next_index->get_index(), old_pool.get_next_tag());

        pool_.store(new_pool, memory_order_relaxed);
        return old_pool.get_index();
    }

    template <bool ThreadSafe>
    void deallocate (index_t index)
    {
        if (ThreadSafe)
            deallocate_impl(index);
        else
            deallocate_impl_unsafe(index);
    }

    void deallocate_impl (index_t index)
    {
        freelist_node * new_pool_node = reinterpret_cast<freelist_node*>(NodeStorage::nodes() + index);
        tagged_index old_pool = pool_.load(memory_order_consume);

        for(;;) {
            tagged_index new_pool (index, old_pool.get_tag());
            new_pool_node->next.set_index(old_pool.get_index());

            if (pool_.compare_exchange_weak(old_pool, new_pool))
                return;
        }
    }

    void deallocate_impl_unsafe (index_t index)
    {
        freelist_node * new_pool_node = reinterpret_cast<freelist_node*>(NodeStorage::nodes() + index);
        tagged_index old_pool = pool_.load(memory_order_consume);

        tagged_index new_pool (index, old_pool.get_tag());
        new_pool_node->next.set_index(old_pool.get_index());

        pool_.store(new_pool);
    }

    atomic<tagged_index> pool_;
};
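
// Metafunction selecting the freelist implementation: a compile-time sized
// or fixed-size configuration yields fixed_size_freelist with the matching
// storage policy; otherwise the node-based freelist_stack is used.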
template <typename T,
          typename Alloc,
          bool IsCompileTimeSized,
          bool IsFixedSize,
          std::size_t Capacity
         >
struct select_freelist
{
    typedef typename mpl::if_c<IsCompileTimeSized,
                               compiletime_sized_freelist_storage<T, Capacity>,
                               runtime_sized_freelist_storage<T, Alloc>
                              >::type fixed_sized_storage_type;

    typedef typename mpl::if_c<IsCompileTimeSized || IsFixedSize,
                               fixed_size_freelist<T, fixed_sized_storage_type>,
                               freelist_stack<T, Alloc>
                              >::type type;
};
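
// Metafunction selecting the handle types matching the chosen freelist:
// tagged pointers for node-based freelists, tagged indices otherwise.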
template <typename T, bool IsNodeBased>
struct select_tagged_handle
{
    typedef typename mpl::if_c<IsNodeBased,
                               tagged_ptr<T>,
                               tagged_index
                              >::type tagged_handle_type;

    typedef typename mpl::if_c<IsNodeBased,
                               T*,
                               typename tagged_index::index_t
                              >::type handle_type;
};

} /* namespace detail */
} /* namespace lockfree */
} /* namespace boost */
#if defined(_MSC_VER)
#pragma warning(pop)
#endif
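
// Illustrative usage sketch (not part of the original header; these
// freelists are internal details of the boost.lockfree containers):
//
//     using boost::lockfree::detail::freelist_stack;
//     freelist_stack<int> pool(std::allocator<int>(), 8); // preallocate 8 nodes
//     int * p = pool.construct<true, false>(42);          // thread-safe, unbounded
//     if (p)
//         pool.destruct<true>(p);                         // runs ~int(), recycles the node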
#endif /* BOOST_LOCKFREE_FREELIST_HPP_INCLUDED */