// adaptive_node_pool_impl.hpp
  1. //////////////////////////////////////////////////////////////////////////////
  2. //
  3. // (C) Copyright Ion Gaztanaga 2005-2012. Distributed under the Boost
  4. // Software License, Version 1.0. (See accompanying file
  5. // LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
  6. //
  7. // See http://www.boost.org/libs/container for documentation.
  8. //
  9. //////////////////////////////////////////////////////////////////////////////
  10. #ifndef BOOST_CONTAINER_DETAIL_ADAPTIVE_NODE_POOL_IMPL_HPP
  11. #define BOOST_CONTAINER_DETAIL_ADAPTIVE_NODE_POOL_IMPL_HPP
  12. #if defined(_MSC_VER)
  13. # pragma once
  14. #endif
  15. #include "config_begin.hpp"
  16. #include <boost/container/container_fwd.hpp>
  17. #include <boost/container/detail/workaround.hpp>
  18. #include <boost/container/detail/utilities.hpp>
  19. #include <boost/intrusive/pointer_traits.hpp>
  20. #include <boost/intrusive/set.hpp>
  21. #include <boost/intrusive/list.hpp>
  22. #include <boost/intrusive/slist.hpp>
  23. #include <boost/container/detail/type_traits.hpp>
  24. #include <boost/container/detail/math_functions.hpp>
  25. #include <boost/container/detail/mpl.hpp>
  26. #include <boost/container/detail/pool_common.hpp>
  27. #include <boost/container/throw_exception.hpp>
  28. #include <boost/assert.hpp>
  29. #include <boost/detail/no_exceptions_support.hpp>
  30. #include <cstddef>
  31. namespace boost {
  32. namespace container {
namespace adaptive_pool_flag {
//Pool configuration flags; combinable as a bitmask
static const unsigned int none            = 0u;       //No special ordering or alignment behavior
static const unsigned int align_only      = 1u << 0u; //Blocks are only aligned; no subblock header machinery
static const unsigned int size_ordered    = 1u << 1u; //Keep blocks ordered by free-node count
static const unsigned int address_ordered = 1u << 2u; //Keep blocks ordered by address
} //namespace adaptive_pool_flag{
  39. namespace container_detail {
  40. template<class size_type>
  41. struct hdr_offset_holder_t
  42. {
  43. hdr_offset_holder_t(size_type offset = 0)
  44. : hdr_offset(offset)
  45. {}
  46. size_type hdr_offset;
  47. };
//Comparison policy for block ordering, selected by the ordering flags
template<class SizeType, unsigned int Flags>
struct less_func;

//No ordering requested: result is constant.
//NOTE(review): "always true" is not a strict weak ordering; presumably
//this specialization is only instantiated when the block container does
//not perform ordered insertion — confirm against users of less_func.
template<class SizeType>
struct less_func<SizeType, adaptive_pool_flag::none>
{
   static bool less(SizeType, SizeType, const void *, const void *)
   { return true; }
};
//Order blocks by free-node count only; addresses are ignored
template<class SizeType>
struct less_func<SizeType, adaptive_pool_flag::size_ordered>
{
   static bool less(SizeType ls, SizeType rs, const void *, const void *)
   { return ls < rs; }
};
  62. template<class SizeType>
  63. struct less_func<SizeType, adaptive_pool_flag::address_ordered>
  64. {
  65. static bool less(SizeType, SizeType, const void *la, const void *ra)
  66. { return &la < &ra; }
  67. };
  68. template<class SizeType>
  69. struct less_func<SizeType, adaptive_pool_flag::size_ordered | adaptive_pool_flag::address_ordered>
  70. {
  71. static bool less(SizeType ls, SizeType rs, const void *la, const void *ra)
  72. { return (ls < rs) || ((ls == rs) && (la < ra)); }
  73. };
//Block container traits for ordered pools: blocks live in an intrusive
//multiset ordered by block_info_t's operator< (built from less_func).
template<class VoidPointer, class SizeType, bool ordered>
struct block_container_traits
{
   typedef typename bi::make_set_base_hook
      < bi::void_pointer<VoidPointer>
      , bi::optimize_size<true>
      , bi::link_mode<bi::normal_link> >::type hook_t;

   template<class T>
   struct container
   {
      typedef typename bi::make_multiset
         <T, bi::base_hook<hook_t>, bi::size_type<SizeType> >::type type;
   };

   //Called when an already-linked block's free-node count changed:
   //if it now has more free nodes than its successor, the multiset
   //order is broken, so erase and re-insert it. The bool (is_full in
   //the unordered specialization) is unused here.
   template<class Container>
   static void reinsert_was_used(Container &container, typename Container::reference v, bool)
   {
      typedef typename Container::const_iterator const_block_iterator;
      const const_block_iterator this_block
         (Container::s_iterator_to(const_cast<typename Container::const_reference>(v)));
      const_block_iterator next_block(this_block);
      //Only the immediate successor needs checking to detect disorder
      if(++next_block != container.cend()){
         if(this_block->free_nodes.size() > next_block->free_nodes.size()){
            container.erase(this_block);
            container.insert(v);
         }
      }
   }

   //Called when a previously unlinked (fully used) block regains a
   //node: a plain ordered insert suffices
   template<class Container>
   static void insert_was_empty(Container &container, typename Container::value_type &v, bool)
   {
      container.insert(v);
   }

   //Remove the first (lowest-ordered) block
   template<class Container>
   static void erase_first(Container &container)
   {
      container.erase(container.cbegin());
   }

   //Remove the last (highest-ordered) block
   template<class Container>
   static void erase_last(Container &container)
   {
      container.erase(--container.cend());
   }
};
  117. template<class VoidPointer, class SizeType>
  118. struct block_container_traits<VoidPointer, SizeType, false>
  119. {
  120. typedef typename bi::make_list_base_hook
  121. < bi::void_pointer<VoidPointer>
  122. , bi::link_mode<bi::normal_link> >::type hook_t;
  123. template<class T>
  124. struct container
  125. {
  126. typedef typename bi::make_list
  127. <T, bi::base_hook<hook_t>, bi::size_type<SizeType>, bi::constant_time_size<false> >::type type;
  128. };
  129. template<class Container>
  130. static void reinsert_was_used(Container &container, typename Container::value_type &v, bool is_full)
  131. {
  132. if(is_full){
  133. container.erase(Container::s_iterator_to(v));
  134. container.push_back(v);
  135. }
  136. }
  137. template<class Container>
  138. static void insert_was_empty(Container &container, typename Container::value_type &v, bool is_full)
  139. {
  140. if(is_full){
  141. container.push_back(v);
  142. }
  143. else{
  144. container.push_front(v);
  145. }
  146. }
  147. template<class Container>
  148. static void erase_first(Container &container)
  149. {
  150. container.pop_front();
  151. }
  152. template<class Container>
  153. static void erase_last(Container &container)
  154. {
  155. container.pop_back();
  156. }
  157. };
//Bundles the types the adaptive pool implementation needs for a given
//free-node chain, void pointer, size type and flag combination.
template<class MultiallocationChain, class VoidPointer, class SizeType, unsigned int Flags>
struct adaptive_pool_types
{
   typedef VoidPointer void_pointer;
   //Any ordering flag selects the ordered (multiset-based) container
   static const bool ordered = (Flags & (adaptive_pool_flag::size_ordered | adaptive_pool_flag::address_ordered)) != 0;
   typedef block_container_traits<VoidPointer, SizeType, ordered> block_container_traits_t;
   typedef typename block_container_traits_t::hook_t hook_t;
   typedef hdr_offset_holder_t<SizeType> hdr_offset_holder;
   //Only the ordering bits are forwarded to the comparison policy
   static const unsigned int order_flags = Flags & (adaptive_pool_flag::size_ordered | adaptive_pool_flag::address_ordered);
   typedef MultiallocationChain free_nodes_t;
   //Per-block bookkeeping header. hdr_offset_holder is a base so the
   //main block's header can be addressed as a hdr_offset_holder
   //(presumably relying on base-at-offset-zero layout — see
   //priv_block_from_node below; confirm if layout assumptions change).
   struct block_info_t
      : public hdr_offset_holder,
        public hook_t
   {
      //An intrusive list of free node from this block
      free_nodes_t free_nodes;
      //Ordering by free-node count and/or address, per order_flags
      friend bool operator <(const block_info_t &l, const block_info_t &r)
      {
         return less_func<SizeType, order_flags>::
            less(l.free_nodes.size(), r.free_nodes.size(), &l , &r);
      }
      //Identity equality: two block_infos are equal only if they are
      //the same object
      friend bool operator ==(const block_info_t &l, const block_info_t &r)
      { return &l == &r; }
   };
   typedef typename block_container_traits_t:: template container<block_info_t>::type block_container_t;
};
//Computes the subblock size (also used as alignment): the smallest
//power of two for which the worst-case bookkeeping overhead stays
//below overhead_percent of the subblock size.
template<class size_type>
inline size_type calculate_alignment
   ( size_type overhead_percent, size_type real_node_size
   , size_type hdr_size, size_type hdr_offset_size, size_type payload_per_allocation)
{
   //to-do: handle real_node_size != node_size
   //Initial guess: ceil(hdr_offset_size*100 / (overhead_percent*real_node_size))
   //elements, i.e. enough nodes that the offset header alone meets the
   //overhead target, rounded up to a power of two
   const size_type divisor = overhead_percent*real_node_size;
   const size_type dividend = hdr_offset_size*100;
   size_type elements_per_subblock = (dividend - 1)/divisor + 1;
   size_type candidate_power_of_2 =
      upper_power_of_2(elements_per_subblock*real_node_size + hdr_offset_size);
   bool overhead_satisfied = false;
   //Now calculate the worst-case overhead for a subblock
   const size_type max_subblock_overhead = hdr_size + payload_per_allocation;
   //Keep doubling the candidate until the overhead ratio is acceptable
   while(!overhead_satisfied){
      elements_per_subblock = (candidate_power_of_2 - max_subblock_overhead)/real_node_size;
      const size_type overhead_size = candidate_power_of_2 - elements_per_subblock*real_node_size;
      if(overhead_size*100/candidate_power_of_2 < overhead_percent){
         overhead_satisfied = true;
      }
      else{
         candidate_power_of_2 <<= 1;
      }
   }
   return candidate_power_of_2;
}
//Given the subblock alignment, computes how many subblocks a block
//needs (num_subblocks) and the real node capacity (real_num_node) so
//that at least elements_per_block nodes fit and total overhead stays
//below overhead_percent. The first subblock also holds the block
//header (hdr_size + payload), so it fits fewer nodes than the rest.
template<class size_type>
inline void calculate_num_subblocks
   (size_type alignment, size_type real_node_size, size_type elements_per_block
   , size_type &num_subblocks, size_type &real_num_node, size_type overhead_percent
   , size_type hdr_size, size_type hdr_offset_size, size_type payload_per_allocation)
{
   //Capacity of the header-carrying subblock vs. a normal subblock
   const size_type hdr_subblock_elements = (alignment - hdr_size - payload_per_allocation)/real_node_size;
   size_type elements_per_subblock = (alignment - hdr_offset_size)/real_node_size;
   size_type possible_num_subblock = (elements_per_block - 1)/elements_per_subblock + 1;
   //Grow until the requested element count actually fits
   while(((possible_num_subblock-1)*elements_per_subblock + hdr_subblock_elements) < elements_per_block){
      ++possible_num_subblock;
   }
   elements_per_subblock = (alignment - hdr_offset_size)/real_node_size;
   bool overhead_satisfied = false;
   //Add subblocks until the overhead ratio of the whole block is acceptable
   while(!overhead_satisfied){
      const size_type total_data = (elements_per_subblock*(possible_num_subblock-1) + hdr_subblock_elements)*real_node_size;
      const size_type total_size = alignment*possible_num_subblock;
      if((total_size - total_data)*100/total_size < overhead_percent){
         overhead_satisfied = true;
      }
      else{
         ++possible_num_subblock;
      }
   }
   num_subblocks = possible_num_subblock;
   real_num_node = (possible_num_subblock-1)*elements_per_subblock + hdr_subblock_elements;
}
  237. template<class SegmentManagerBase, unsigned int Flags>
  238. class private_adaptive_node_pool_impl
  239. {
  240. //Non-copyable
  241. private_adaptive_node_pool_impl();
  242. private_adaptive_node_pool_impl(const private_adaptive_node_pool_impl &);
  243. private_adaptive_node_pool_impl &operator=(const private_adaptive_node_pool_impl &);
  244. typedef private_adaptive_node_pool_impl this_type;
  245. typedef typename SegmentManagerBase::void_pointer void_pointer;
  246. static const typename SegmentManagerBase::
  247. size_type PayloadPerAllocation = SegmentManagerBase::PayloadPerAllocation;
  248. //Flags
  249. //align_only
  250. static const bool AlignOnly = (Flags & adaptive_pool_flag::align_only) != 0;
  251. typedef bool_<AlignOnly> IsAlignOnly;
  252. typedef true_ AlignOnlyTrue;
  253. typedef false_ AlignOnlyFalse;
  254. //size_ordered
  255. static const bool SizeOrdered = (Flags & adaptive_pool_flag::size_ordered) != 0;
  256. typedef bool_<SizeOrdered> IsSizeOrdered;
  257. typedef true_ SizeOrderedTrue;
  258. typedef false_ SizeOrderedFalse;
  259. //address_ordered
  260. static const bool AddressOrdered = (Flags & adaptive_pool_flag::address_ordered) != 0;
  261. typedef bool_<AddressOrdered> IsAddressOrdered;
  262. typedef true_ AddressOrderedTrue;
  263. typedef false_ AddressOrderedFalse;
  264. public:
  265. typedef typename SegmentManagerBase::multiallocation_chain multiallocation_chain;
  266. typedef typename SegmentManagerBase::size_type size_type;
  267. private:
  268. typedef adaptive_pool_types
  269. <multiallocation_chain, void_pointer, size_type, Flags> adaptive_pool_types_t;
  270. typedef typename adaptive_pool_types_t::free_nodes_t free_nodes_t;
  271. typedef typename adaptive_pool_types_t::block_info_t block_info_t;
  272. typedef typename adaptive_pool_types_t::block_container_t block_container_t;
  273. typedef typename adaptive_pool_types_t::block_container_traits_t block_container_traits_t;
  274. typedef typename block_container_t::iterator block_iterator;
  275. typedef typename block_container_t::const_iterator const_block_iterator;
  276. typedef typename adaptive_pool_types_t::hdr_offset_holder hdr_offset_holder;
  277. static const size_type MaxAlign = alignment_of<void_pointer>::value;
  278. static const size_type HdrSize = ((sizeof(block_info_t)-1)/MaxAlign+1)*MaxAlign;
  279. static const size_type HdrOffsetSize = ((sizeof(hdr_offset_holder)-1)/MaxAlign+1)*MaxAlign;
  280. public:
  281. //!Segment manager typedef
  282. typedef SegmentManagerBase segment_manager_base_type;
//!Constructor from a segment manager. Never throws
//!  segment_mngr_base: upstream general-purpose allocator
//!  node_size:         requested node size (rounded up to pointer alignment)
//!  nodes_per_block:   desired nodes per block
//!  max_free_blocks:   totally-free blocks kept cached before returning
//!                     memory to the segment manager
//!  overhead_percent:  maximum bookkeeping overhead tolerated (ignored
//!                     in AlignOnly mode)
private_adaptive_node_pool_impl
   ( segment_manager_base_type *segment_mngr_base
   , size_type node_size
   , size_type nodes_per_block
   , size_type max_free_blocks
   , unsigned char overhead_percent
   )
   : m_max_free_blocks(max_free_blocks)
   , m_real_node_size(lcm(node_size, size_type(alignment_of<void_pointer>::value)))
   //Round the size to a power of two value.
   //This is the total memory size (including payload) that we want to
   //allocate from the general-purpose allocator
   , m_real_block_alignment
      (AlignOnly ?
         upper_power_of_2(HdrSize + m_real_node_size*nodes_per_block) :
         calculate_alignment( (size_type)overhead_percent, m_real_node_size
                            , HdrSize, HdrOffsetSize, PayloadPerAllocation))
   //This is the real number of nodes per block
   , m_num_subblocks(0)
   , m_real_num_node(AlignOnly ? (m_real_block_alignment - PayloadPerAllocation - HdrSize)/m_real_node_size : 0)
   //General purpose allocator
   , mp_segment_mngr_base(segment_mngr_base)
   , m_block_container()
   , m_totally_free_blocks(0)
{
   //In subblock mode m_num_subblocks/m_real_num_node can't be computed
   //in the init list, so finish them here
   if(!AlignOnly){
      calculate_num_subblocks
         ( m_real_block_alignment
         , m_real_node_size
         , nodes_per_block
         , m_num_subblocks
         , m_real_num_node
         , (size_type)overhead_percent
         , HdrSize
         , HdrOffsetSize
         , PayloadPerAllocation);
   }
}
//!Destructor. Deallocates all allocated blocks. Never throws
~private_adaptive_node_pool_impl()
{  this->priv_clear();  }

//!Returns the node capacity of a single block
size_type get_real_num_node() const
{  return m_real_num_node;  }

//!Returns the segment manager. Never throws
segment_manager_base_type* get_segment_manager_base()const
{  return container_detail::to_raw_pointer(mp_segment_mngr_base);  }
//!Allocates a single node. Can throw
//!(NOTE: original comment said "array of count elements", but this
//!member returns exactly one node)
void *allocate_node()
{
   this->priv_invariants();
   //If there are no free nodes we allocate a new block
   if(!m_block_container.empty()){
      //We take the first free node the multiset can't be empty
      free_nodes_t &free_nodes = m_block_container.begin()->free_nodes;
      BOOST_ASSERT(!free_nodes.empty());
      const size_type free_nodes_count = free_nodes.size();
      void *first_node = container_detail::to_raw_pointer(free_nodes.pop_front());
      //A drained block must leave the container until it gets nodes back
      if(free_nodes.empty()){
         block_container_traits_t::erase_first(m_block_container);
      }
      //If the block was totally free before, it no longer is
      m_totally_free_blocks -= static_cast<size_type>(free_nodes_count == m_real_num_node);
      this->priv_invariants();
      return first_node;
   }
   else{
      //No free nodes anywhere: allocate one fresh block and take a node
      multiallocation_chain chain;
      this->priv_append_from_new_blocks(1, chain, IsAlignOnly());
      return container_detail::to_raw_pointer(chain.pop_front());
   }
}
//!Deallocates a single node pointed by pElem. Never throws
void deallocate_node(void *pElem)
{
   this->priv_invariants();
   //Locate the owning block via the alignment-based header lookup
   block_info_t &block_info = *this->priv_block_from_node(pElem);
   BOOST_ASSERT(block_info.free_nodes.size() < m_real_num_node);
   //We put the node at the end of the free node list
   //(original comment said "beginning", but this is push_back)
   block_info.free_nodes.push_back(void_pointer(pElem));
   //Relink/reorder the block; it was unlinked iff this is its only free node
   this->priv_reinsert_block(block_info, block_info.free_nodes.size() == 1);
   //Trim the cache of totally free blocks down to the configured limit
   this->priv_deallocate_free_blocks(m_max_free_blocks);
   this->priv_invariants();
}
//!Allocates n nodes, appending them to chain.
//!Can throw. On failure, nodes already obtained are returned to the pool.
void allocate_nodes(const size_type n, multiallocation_chain &chain)
{
   size_type i = 0;
   BOOST_TRY{
      this->priv_invariants();
      while(i != n){
         //If there are no free nodes we allocate all needed blocks
         if (m_block_container.empty()){
            this->priv_append_from_new_blocks(n - i, chain, IsAlignOnly());
            BOOST_ASSERT(m_block_container.empty() || (++m_block_container.cbegin() == m_block_container.cend()));
            BOOST_ASSERT(chain.size() == n);
            break;
         }
         //Drain free nodes from the first block
         free_nodes_t &free_nodes = m_block_container.begin()->free_nodes;
         const size_type free_nodes_count_before = free_nodes.size();
         //If the block was totally free, it will stop being so
         m_totally_free_blocks -= static_cast<size_type>(free_nodes_count_before == m_real_num_node);
         const size_type num_left  = n-i;
         const size_type num_elems = (num_left < free_nodes_count_before) ? num_left : free_nodes_count_before;
         typedef typename free_nodes_t::iterator free_nodes_iterator;
         if(num_left < free_nodes_count_before){
            //Partial take: splice the first num_elems nodes into chain
            const free_nodes_iterator it_bbeg(free_nodes.before_begin());
            free_nodes_iterator it_bend(it_bbeg);
            for(size_type j = 0; j != num_elems; ++j){
               ++it_bend;
            }
            free_nodes_iterator it_end = it_bend; ++it_end;
            free_nodes_iterator it_beg = it_bbeg; ++it_beg;
            free_nodes.erase_after(it_bbeg, it_end, num_elems);
            chain.incorporate_after(chain.last(), &*it_beg, &*it_bend, num_elems);
            //chain.splice_after(chain.last(), free_nodes, it_bbeg, it_bend, num_elems);
            BOOST_ASSERT(!free_nodes.empty());
         }
         else{
            //Full take: move every free node and unlink the drained block
            const free_nodes_iterator it_beg(free_nodes.begin()), it_bend(free_nodes.last());
            free_nodes.clear();
            chain.incorporate_after(chain.last(), &*it_beg, &*it_bend, num_elems);
            block_container_traits_t::erase_first(m_block_container);
         }
         i += num_elems;
      }
   }
   BOOST_CATCH(...){
      //Roll back: give back whatever was collected before rethrowing
      this->deallocate_nodes(chain);
      BOOST_RETHROW
   }
   BOOST_CATCH_END
   this->priv_invariants();
}
//!Deallocates a linked list of nodes. Never throws
void deallocate_nodes(multiallocation_chain &nodes)
{
   this->priv_invariants();
   //To take advantage of node locality, wait until two
   //nodes belong to different blocks. Only then reinsert
   //the block of the first node in the block tree.
   //Cache of the previous block
   block_info_t *prev_block_info = 0;
   //If block was empty before this call, it's not already
   //inserted in the block tree.
   bool prev_block_was_empty = false;
   typedef typename free_nodes_t::iterator free_nodes_iterator;
   {
      const free_nodes_iterator itbb(nodes.before_begin()), ite(nodes.end());
      free_nodes_iterator itf(nodes.begin()), itbf(itbb);
      //Starts at size_type(-1) so the first ++ yields 0 for the dummy run
      size_type splice_node_count = size_type(-1);
      while(itf != ite){
         void *pElem = container_detail::to_raw_pointer(&*itf);
         block_info_t &block_info = *this->priv_block_from_node(pElem);
         BOOST_ASSERT(block_info.free_nodes.size() < m_real_num_node);
         ++splice_node_count;
         //If block change is detected calculate the cached block position in the tree
         if(&block_info != prev_block_info){
            if(prev_block_info){ //Make sure we skip the initial "dummy" cache
               //Splice the accumulated run [begin, itbf] into the cached
               //block's free list, then reposition that block
               free_nodes_iterator it(itbb); ++it;
               nodes.erase_after(itbb, itf, splice_node_count);
               prev_block_info->free_nodes.incorporate_after(prev_block_info->free_nodes.last(), &*it, &*itbf, splice_node_count);
               this->priv_reinsert_block(*prev_block_info, prev_block_was_empty);
               splice_node_count = 0;
            }
            //Update cache with new data
            prev_block_was_empty = block_info.free_nodes.empty();
            prev_block_info = &block_info;
         }
         itbf = itf;
         ++itf;
      }
   }
   if(prev_block_info){
      //The loop reinserts all blocks except the last one
      const free_nodes_iterator itfirst(nodes.begin()), itlast(nodes.last());
      const size_type splice_node_count = nodes.size();
      nodes.clear();
      prev_block_info->free_nodes.incorporate_after(prev_block_info->free_nodes.last(), &*itfirst, &*itlast, splice_node_count);
      this->priv_reinsert_block(*prev_block_info, prev_block_was_empty);
      this->priv_invariants();
      //Trim the cache of totally free blocks down to the configured limit
      this->priv_deallocate_free_blocks(m_max_free_blocks);
   }
}
//!Returns all totally free cached blocks to the segment manager
void deallocate_free_blocks()
{  this->priv_deallocate_free_blocks(0);  }
  469. size_type num_free_nodes()
  470. {
  471. typedef typename block_container_t::const_iterator citerator;
  472. size_type count = 0;
  473. citerator it (m_block_container.begin()), itend(m_block_container.end());
  474. for(; it != itend; ++it){
  475. count += it->free_nodes.size();
  476. }
  477. return count;
  478. }
//!Swaps ownership of blocks and segment manager with another pool.
//!Both pools must share identical configuration (asserted below)
void swap(private_adaptive_node_pool_impl &other)
{
   //Configuration members are not swapped: they must already match
   BOOST_ASSERT(m_max_free_blocks == other.m_max_free_blocks);
   BOOST_ASSERT(m_real_node_size == other.m_real_node_size);
   BOOST_ASSERT(m_real_block_alignment == other.m_real_block_alignment);
   BOOST_ASSERT(m_real_num_node == other.m_real_num_node);
   std::swap(mp_segment_mngr_base, other.mp_segment_mngr_base);
   std::swap(m_totally_free_blocks, other.m_totally_free_blocks);
   m_block_container.swap(other.m_block_container);
}
//Deprecated, use deallocate_free_blocks
void deallocate_free_chunks()
{  this->priv_deallocate_free_blocks(0);  }
  492. private:
  493. void priv_deallocate_free_blocks(size_type max_free_blocks)
  494. { //Trampoline function to ease inlining
  495. if(m_totally_free_blocks > max_free_blocks){
  496. this->priv_deallocate_free_blocks_impl(max_free_blocks);
  497. }
  498. }
//Returns totally free blocks to the segment manager until only
//max_free_blocks remain cached. Totally free blocks are taken from
//the back of the container (see block_container_traits).
void priv_deallocate_free_blocks_impl(size_type max_free_blocks)
{
   this->priv_invariants();
   //Now check if we've reached the free nodes limit
   //and check if we have free blocks. If so, deallocate as much
   //as we can to stay below the limit
   multiallocation_chain chain;
   {
      const const_block_iterator itend = m_block_container.cend();
      const_block_iterator it = itend;
      --it;
      size_type totally_free_blocks = m_totally_free_blocks;
      for( ; totally_free_blocks > max_free_blocks; --totally_free_blocks){
         BOOST_ASSERT(it->free_nodes.size() == m_real_num_node);
         //The deallocatable address is the first subblock, not the header
         void *addr = priv_first_subblock_from_block(const_cast<block_info_t*>(&*it));
         --it;
         block_container_traits_t::erase_last(m_block_container);
         chain.push_front(void_pointer(addr));
      }
      BOOST_ASSERT((m_totally_free_blocks - max_free_blocks) == chain.size());
      m_totally_free_blocks = max_free_blocks;
   }
   //Single bulk deallocation of every collected block
   this->mp_segment_mngr_base->deallocate_many(chain);
}
  523. void priv_reinsert_block(block_info_t &prev_block_info, const bool prev_block_was_empty)
  524. {
  525. //Cache the free nodes from the block
  526. const size_type this_block_free_nodes = prev_block_info.free_nodes.size();
  527. const bool is_full = this_block_free_nodes == m_real_num_node;
  528. //Update free block count
  529. m_totally_free_blocks += static_cast<size_type>(is_full);
  530. if(prev_block_was_empty){
  531. block_container_traits_t::insert_was_empty(m_block_container, prev_block_info, is_full);
  532. }
  533. else{
  534. block_container_traits_t::reinsert_was_used(m_block_container, prev_block_info, is_full);
  535. }
  536. }
class block_destroyer;
friend class block_destroyer;

//Disposer used by priv_clear: collects each block's deallocatable
//address into a chain for a single bulk deallocation
class block_destroyer
{
   public:
   block_destroyer(const this_type *impl, multiallocation_chain &chain)
      :  mp_impl(impl), m_chain(chain)
   {}

   void operator()(typename block_container_t::pointer to_deallocate)
   {  return this->do_destroy(to_deallocate, IsAlignOnly());  }

   private:
   //AlignOnly mode: the block header is the allocation start
   void do_destroy(typename block_container_t::pointer to_deallocate, AlignOnlyTrue)
   {
      BOOST_ASSERT(to_deallocate->free_nodes.size() == mp_impl->m_real_num_node);
      m_chain.push_back(to_deallocate);
   }

   //Subblock mode: the allocation starts at the first subblock
   void do_destroy(typename block_container_t::pointer to_deallocate, AlignOnlyFalse)
   {
      BOOST_ASSERT(to_deallocate->free_nodes.size() == mp_impl->m_real_num_node);
      BOOST_ASSERT(0 == to_deallocate->hdr_offset);
      hdr_offset_holder *hdr_off_holder =
         mp_impl->priv_first_subblock_from_block(container_detail::to_raw_pointer(to_deallocate));
      m_chain.push_back(hdr_off_holder);
   }

   const this_type *mp_impl;
   multiallocation_chain &m_chain;
};
//This macro will activate invariant checking. Slow, but helpful for debugging the code.
//#define BOOST_CONTAINER_ADAPTIVE_NODE_POOL_CHECK_INVARIANTS
//Debug-only invariant checker; compiles to an empty body unless the
//macro above is defined
void priv_invariants()
#ifdef BOOST_CONTAINER_ADAPTIVE_NODE_POOL_CHECK_INVARIANTS
#undef BOOST_CONTAINER_ADAPTIVE_NODE_POOL_CHECK_INVARIANTS
{
   const const_block_iterator itend(m_block_container.end());

   {  //Check that the container is correctly ordered
      const_block_iterator it(m_block_container.begin());
      if(it != itend){
         for(++it; it != itend; ++it){
            const_block_iterator prev(it);
            --prev;
            BOOST_ASSERT(*prev < *it);
            (void)prev;   (void)it;
         }
      }
   }
   {  //Check that the total free nodes are correct
      const_block_iterator it(m_block_container.cbegin());
      size_type total_free_nodes = 0;
      for(; it != itend; ++it){
         total_free_nodes += it->free_nodes.size();
      }
      BOOST_ASSERT(total_free_nodes >= m_totally_free_blocks*m_real_num_node);
   }
   {  //Check that the total totally free blocks are correct
      BOOST_ASSERT(m_block_container.size() >= m_totally_free_blocks);
      const_block_iterator it = m_block_container.cend();
      size_type total_free_blocks = m_totally_free_blocks;
      //Totally free blocks are expected at the back of the container
      while(total_free_blocks--){
         BOOST_ASSERT((--it)->free_nodes.size() == m_real_num_node);
      }
   }

   if(!AlignOnly){
      //Check that header offsets are correct
      const_block_iterator it = m_block_container.begin();
      for(; it != itend; ++it){
         hdr_offset_holder *hdr_off_holder = this->priv_first_subblock_from_block(const_cast<block_info_t *>(&*it));
         for(size_type i = 0, max = m_num_subblocks; i < max; ++i){
            const size_type offset = reinterpret_cast<char*>(const_cast<block_info_t *>(&*it)) - reinterpret_cast<char*>(hdr_off_holder);
            BOOST_ASSERT(hdr_off_holder->hdr_offset == offset);
            BOOST_ASSERT(0 == ((size_type)hdr_off_holder & (m_real_block_alignment - 1)));
            BOOST_ASSERT(0 == (hdr_off_holder->hdr_offset & (m_real_block_alignment - 1)));
            hdr_off_holder = reinterpret_cast<hdr_offset_holder *>(reinterpret_cast<char*>(hdr_off_holder) + m_real_block_alignment);
         }
      }
   }
}
#else
{} //empty
#endif
//!Deallocates all used memory. Never throws
void priv_clear()
{
   #ifndef NDEBUG
   block_iterator it    = m_block_container.begin();
   block_iterator itend = m_block_container.end();
   size_type n_free_nodes = 0;
   for(; it != itend; ++it){
      //Check for memory leak: every block must be totally free
      BOOST_ASSERT(it->free_nodes.size() == m_real_num_node);
      ++n_free_nodes;
   }
   BOOST_ASSERT(n_free_nodes == m_totally_free_blocks);
   #endif
   //Check for memory leaks
   this->priv_invariants();
   //Collect every block with block_destroyer and free them in one call
   multiallocation_chain chain;
   m_block_container.clear_and_dispose(block_destroyer(this, chain));
   this->mp_segment_mngr_base->deallocate_many(chain);
   m_totally_free_blocks = 0;
}
//Subblock mode: round the node address down to the subblock boundary
//to find the subblock's hdr_offset_holder, then jump hdr_offset bytes
//forward to the main block header
block_info_t *priv_block_from_node(void *node, AlignOnlyFalse) const
{
   hdr_offset_holder *hdr_off_holder =
      reinterpret_cast<hdr_offset_holder*>((std::size_t)node & size_type(~(m_real_block_alignment - 1)));
   BOOST_ASSERT(0 == ((std::size_t)hdr_off_holder & (m_real_block_alignment - 1)));
   BOOST_ASSERT(0 == (hdr_off_holder->hdr_offset & (m_real_block_alignment - 1)));
   block_info_t *block = reinterpret_cast<block_info_t *>
      (reinterpret_cast<char*>(hdr_off_holder) + hdr_off_holder->hdr_offset);
   //The main block's own offset is always zero
   BOOST_ASSERT(block->hdr_offset == 0);
   return block;
}
  648. block_info_t *priv_block_from_node(void *node, AlignOnlyTrue) const
  649. {
  650. return (block_info_t *)((std::size_t)node & std::size_t(~(m_real_block_alignment - 1)));
  651. }
//Dispatch on the AlignOnly flag at compile time
block_info_t *priv_block_from_node(void *node) const
{  return this->priv_block_from_node(node, IsAlignOnly());  }

//Dispatch on the AlignOnly flag at compile time
hdr_offset_holder *priv_first_subblock_from_block(block_info_t *block) const
{  return this->priv_first_subblock_from_block(block, IsAlignOnly());  }
//Subblock mode: the main block header lives in the last subblock, so
//step back (m_num_subblocks-1) subblocks to reach the first one, which
//is the address originally obtained from the segment manager
hdr_offset_holder *priv_first_subblock_from_block(block_info_t *block, AlignOnlyFalse) const
{
   hdr_offset_holder *const hdr_off_holder = reinterpret_cast<hdr_offset_holder*>
         (reinterpret_cast<char*>(block) - (m_num_subblocks-1)*m_real_block_alignment);
   BOOST_ASSERT(hdr_off_holder->hdr_offset == size_type(reinterpret_cast<char*>(block) - reinterpret_cast<char*>(hdr_off_holder)));
   BOOST_ASSERT(0 == ((std::size_t)hdr_off_holder & (m_real_block_alignment - 1)));
   BOOST_ASSERT(0 == (hdr_off_holder->hdr_offset & (m_real_block_alignment - 1)));
   return hdr_off_holder;
}
  665. hdr_offset_holder *priv_first_subblock_from_block(block_info_t *block, AlignOnlyTrue) const
  666. {
  667. return reinterpret_cast<hdr_offset_holder*>(block);
  668. }
  669. void priv_dispatch_block_chain_or_free
  670. ( multiallocation_chain &chain, block_info_t &c_info, size_type num_node
  671. , char *mem_address, size_type total_elements, bool insert_block_if_free)
  672. {
  673. BOOST_ASSERT(chain.size() <= total_elements);
  674. //First add all possible nodes to the chain
  675. const size_type left = total_elements - chain.size();
  676. const size_type max_chain = (num_node < left) ? num_node : left;
  677. mem_address = static_cast<char *>(container_detail::to_raw_pointer
  678. (chain.incorporate_after(chain.last(), void_pointer(mem_address), m_real_node_size, max_chain)));
  679. //Now store remaining nodes in the free list
  680. if(const size_type max_free = num_node - max_chain){
  681. free_nodes_t & free_nodes = c_info.free_nodes;
  682. free_nodes.incorporate_after(free_nodes.last(), void_pointer(mem_address), m_real_node_size, max_free);
  683. if(insert_block_if_free){
  684. m_block_container.push_front(c_info);
  685. }
  686. }
  687. }
  688. //!Allocates a several blocks of nodes. Can throw
  689. void priv_append_from_new_blocks(size_type min_elements, multiallocation_chain &chain, AlignOnlyTrue)
  690. {
  691. BOOST_ASSERT(m_block_container.empty());
  692. BOOST_ASSERT(min_elements > 0);
  693. const size_type n = (min_elements - 1)/m_real_num_node + 1;
  694. const size_type real_block_size = m_real_block_alignment - PayloadPerAllocation;
  695. const size_type total_elements = chain.size() + min_elements;
  696. for(size_type i = 0; i != n; ++i){
  697. //We allocate a new NodeBlock and put it the last
  698. //element of the tree
  699. char *mem_address = static_cast<char*>
  700. (mp_segment_mngr_base->allocate_aligned(real_block_size, m_real_block_alignment));
  701. if(!mem_address){
  702. //In case of error, free memory deallocating all nodes (the new ones allocated
  703. //in this function plus previously stored nodes in chain).
  704. this->deallocate_nodes(chain);
  705. throw_bad_alloc();
  706. }
  707. block_info_t &c_info = *new(mem_address)block_info_t();
  708. mem_address += HdrSize;
  709. if(i != (n-1)){
  710. chain.incorporate_after(chain.last(), void_pointer(mem_address), m_real_node_size, m_real_num_node);
  711. }
  712. else{
  713. this->priv_dispatch_block_chain_or_free(chain, c_info, m_real_num_node, mem_address, total_elements, true);
  714. }
  715. }
  716. }
//!Allocates several multi-subblock blocks of nodes (AlignOnlyFalse
//!configuration). Each block is m_num_subblocks aligned subblocks; the
//!block_info_t header is placed in the LAST subblock and every other
//!subblock starts with a hdr_offset_holder recording its byte distance to
//!that header (this is what priv_block_from_node reads back). Can throw.
void priv_append_from_new_blocks(size_type min_elements, multiallocation_chain &chain, AlignOnlyFalse)
{
   BOOST_ASSERT(m_block_container.empty());
   BOOST_ASSERT(min_elements > 0);
   //Number of blocks needed to provide at least min_elements nodes
   const size_type n = (min_elements - 1)/m_real_num_node + 1;
   //Bytes requested per block: all subblocks minus the allocator's payload
   const size_type real_block_size = m_real_block_alignment*m_num_subblocks - PayloadPerAllocation;
   //Nodes that fit a normal subblock after its hdr_offset_holder marker
   const size_type elements_per_subblock = (m_real_block_alignment - HdrOffsetSize)/m_real_node_size;
   //Nodes that fit the header subblock after the full block_info_t header
   const size_type hdr_subblock_elements = (m_real_block_alignment - HdrSize - PayloadPerAllocation)/m_real_node_size;
   const size_type total_elements = chain.size() + min_elements;
   for(size_type i = 0; i != n; ++i){
      //We allocate a new NodeBlock and put it the last
      //element of the tree
      char *mem_address = static_cast<char*>
         (mp_segment_mngr_base->allocate_aligned(real_block_size, m_real_block_alignment));
      if(!mem_address){
         //In case of error, free memory deallocating all nodes (the new ones allocated
         //in this function plus previously stored nodes in chain).
         this->deallocate_nodes(chain);
         throw_bad_alloc();
      }
      //First initialize header information on the last subblock
      char *hdr_addr = mem_address + m_real_block_alignment*(m_num_subblocks-1);
      block_info_t &c_info = *new(hdr_addr)block_info_t();
      //Some structural checks: hdr_offset must sit at offset zero inside
      //block_info_t, so a zero offset identifies the header subblock
      BOOST_ASSERT(static_cast<void*>(&static_cast<hdr_offset_holder&>(c_info).hdr_offset) ==
         static_cast<void*>(&c_info));   (void)c_info;
      if(i != (n-1)){
         //Non-final block: every node goes straight to the caller's chain
         for( size_type subblock = 0, maxsubblock = m_num_subblocks - 1
            ; subblock < maxsubblock
            ; ++subblock, mem_address += m_real_block_alignment){
            //Initialize header offset mark
            new(mem_address) hdr_offset_holder(size_type(hdr_addr - mem_address));
            chain.incorporate_after
               (chain.last(), void_pointer(mem_address + HdrOffsetSize), m_real_node_size, elements_per_subblock);
         }
         //Nodes of the header subblock itself, placed after the header
         chain.incorporate_after(chain.last(), void_pointer(hdr_addr + HdrSize), m_real_node_size, hdr_subblock_elements);
      }
      else{
         //Final block: surplus nodes are routed to the block's free list
         for( size_type subblock = 0, maxsubblock = m_num_subblocks - 1
            ; subblock < maxsubblock
            ; ++subblock, mem_address += m_real_block_alignment){
            //Initialize header offset mark
            new(mem_address) hdr_offset_holder(size_type(hdr_addr - mem_address));
            this->priv_dispatch_block_chain_or_free
               (chain, c_info, elements_per_subblock, mem_address + HdrOffsetSize, total_elements, false);
         }
         //Only the last dispatch may insert the block in the container
         this->priv_dispatch_block_chain_or_free
            (chain, c_info, hdr_subblock_elements, hdr_addr + HdrSize, total_elements, true);
      }
   }
}
private:
   //Segment-manager pointer type rebound from void_pointer so it belongs to
   //the same pointer family (raw or offset pointer) as the managed segment
   typedef typename boost::intrusive::pointer_traits
      <void_pointer>::template rebind_pointer<segment_manager_base_type>::type segment_mngr_base_ptr_t;
   //Maximum number of totally free blocks kept cached
   //NOTE(review): its use is not visible in this chunk -- presumably the
   //threshold before returning blocks to the segment manager; confirm
   const size_type m_max_free_blocks;
   //Size in bytes of each node as carved from the blocks (stride used in
   //incorporate_after calls)
   const size_type m_real_node_size;
   //Round the size to a power of two value.
   //This is the total memory size (including payload) that we want to
   //allocate from the general-purpose allocator
   const size_type m_real_block_alignment;
   //Number of alignment-sized subblocks per block
   //NOTE(review): the AlignOnlyTrue paths never read this -- presumably
   //meaningful only in the AlignOnlyFalse configuration; confirm
   size_type m_num_subblocks;
   //This is the real number of nodes per block
   //const
   size_type m_real_num_node;
   segment_mngr_base_ptr_t mp_segment_mngr_base;   //Segment manager
   block_container_t m_block_container;            //Intrusive block list
   size_type m_totally_free_blocks;                //Free blocks
};
  785. } //namespace container_detail {
  786. } //namespace container {
  787. } //namespace boost {
  788. #include <boost/container/detail/config_end.hpp>
  789. #endif //#ifndef BOOST_CONTAINER_DETAIL_ADAPTIVE_NODE_POOL_IMPL_HPP