auto_buffer.hpp 36 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138
  1. // Copyright Thorsten Ottosen, 2009.
  2. // Distributed under the Boost Software License, Version 1.0. (See
  3. // accompanying file LICENSE_1_0.txt or copy at
  4. // http://www.boost.org/LICENSE_1_0.txt)
  5. #ifndef BOOST_SIGNALS2_DETAIL_AUTO_BUFFER_HPP_25_02_2009
  6. #define BOOST_SIGNALS2_DETAIL_AUTO_BUFFER_HPP_25_02_2009
  7. #include <boost/detail/workaround.hpp>
  8. #if defined(_MSC_VER) && (_MSC_VER >= 1200)
  9. # pragma once
  10. #endif
  11. #if BOOST_WORKAROUND(BOOST_MSVC, >= 1400)
  12. #pragma warning(push)
  13. #pragma warning(disable:4996)
  14. #endif
  15. #include <boost/assert.hpp>
  16. #include <boost/iterator/reverse_iterator.hpp>
  17. #include <boost/iterator/iterator_traits.hpp>
  18. #include <boost/mpl/if.hpp>
  19. #include <boost/multi_index/detail/scope_guard.hpp>
  20. #include <boost/swap.hpp>
  21. #include <boost/throw_exception.hpp>
  22. #include <boost/type_traits/aligned_storage.hpp>
  23. #include <boost/type_traits/alignment_of.hpp>
  24. #include <boost/type_traits/has_nothrow_copy.hpp>
  25. #include <boost/type_traits/has_nothrow_assign.hpp>
  26. #include <boost/type_traits/has_trivial_assign.hpp>
  27. #include <boost/type_traits/has_trivial_constructor.hpp>
  28. #include <boost/type_traits/has_trivial_destructor.hpp>
  29. #include <algorithm>
  30. #include <cstring>
  31. #include <iterator>
  32. #include <memory>
  33. #include <stdexcept>
  34. namespace boost
  35. {
  36. namespace signals2
  37. {
  38. namespace detail
  39. {
  40. //
  41. // Policies for creating the stack buffer.
  42. //
// Stack-buffer sizing policy: reserve room for exactly N objects of T
// inside the auto_buffer itself.
template< unsigned N >
struct store_n_objects
{
    BOOST_STATIC_CONSTANT( unsigned, value = N );
};
// Stack-buffer sizing policy: reserve N raw bytes inside the
// auto_buffer; the object count is derived as N / sizeof(T).
template< unsigned N >
struct store_n_bytes
{
    BOOST_STATIC_CONSTANT( unsigned, value = N );
};
namespace auto_buffer_detail
{
    // Number of bytes occupied by the stack buffer for a given policy.
    // Primary case: the policy counts objects, so multiply by sizeof(T).
    template< class Policy, class T >
    struct compute_buffer_size
    {
        BOOST_STATIC_CONSTANT( unsigned, value = Policy::value * sizeof(T) );
    };

    // store_n_bytes already expresses a byte count; use it unchanged.
    template< unsigned N, class T >
    struct compute_buffer_size< store_n_bytes<N>, T >
    {
        BOOST_STATIC_CONSTANT( unsigned, value = N );
    };

    // Number of T objects the stack buffer can hold.
    // Primary case: the policy counts objects directly.
    template< class Policy, class T >
    struct compute_buffer_objects
    {
        BOOST_STATIC_CONSTANT( unsigned, value = Policy::value );
    };

    // store_n_bytes: convert the byte budget into whole objects
    // (truncating division).
    template< unsigned N, class T >
    struct compute_buffer_objects< store_n_bytes<N>, T >
    {
        BOOST_STATIC_CONSTANT( unsigned, value = N / sizeof(T) );
    };
}
// Default reallocation policy used by auto_buffer.
struct default_grow_policy
{
    // Returns the capacity to allocate when the current one is exhausted.
    template< class SizeType >
    static SizeType new_capacity( SizeType capacity )
    {
        //
        // @remark: we grow the capacity quite aggressively.
        //          this is justified since we aim to minimize
        //          heap-allocations, and because we mostly use
        //          the buffer locally.
        return capacity * 4u;
    }

    // Whether shrink_to_fit() should actually reallocate down to 'size'.
    template< class SizeType >
    static bool should_shrink( SizeType size, SizeType capacity )
    {
        //
        // @remark: when defining a new grow policy, one might
        //          choose that if the wasted space is less
        //          than a certain percentage, then it is of
        //          little use to shrink.
        //
        return true;
    }
};
// Primary declaration with the library defaults: up to 256 objects in
// the embedded stack buffer, aggressive 4x growth, std::allocator.
template< class T,
          class StackBufferPolicy = store_n_objects<256>,
          class GrowPolicy        = default_grow_policy,
          class Allocator         = std::allocator<T> >
class auto_buffer;
  105. template
  106. <
  107. class T,
  108. class StackBufferPolicy,
  109. class GrowPolicy,
  110. class Allocator
  111. >
  112. class auto_buffer : Allocator
  113. {
  114. private:
  115. enum { N = auto_buffer_detail::
  116. compute_buffer_objects<StackBufferPolicy,T>::value };
  117. BOOST_STATIC_CONSTANT( bool, is_stack_buffer_empty = N == 0u );
  118. typedef auto_buffer<T, store_n_objects<0>, GrowPolicy, Allocator>
  119. local_buffer;
  120. public:
  121. typedef Allocator allocator_type;
  122. typedef T value_type;
  123. typedef typename Allocator::size_type size_type;
  124. typedef typename Allocator::difference_type difference_type;
  125. typedef T* pointer;
  126. typedef typename Allocator::pointer allocator_pointer;
  127. typedef const T* const_pointer;
  128. typedef T& reference;
  129. typedef const T& const_reference;
  130. typedef pointer iterator;
  131. typedef const_pointer const_iterator;
  132. typedef boost::reverse_iterator<iterator> reverse_iterator;
  133. typedef boost::reverse_iterator<const_iterator> const_reverse_iterator;
  134. typedef typename boost::mpl::if_c< boost::has_trivial_assign<T>::value
  135. && sizeof(T) <= sizeof(long double),
  136. const value_type,
  137. const_reference >::type
  138. optimized_const_reference;
  139. private:
  140. pointer allocate( size_type capacity_arg )
  141. {
  142. if( capacity_arg > N )
  143. return &*get_allocator().allocate( capacity_arg );
  144. else
  145. return static_cast<T*>( members_.address() );
  146. }
  147. void deallocate( pointer where, size_type capacity_arg )
  148. {
  149. if( capacity_arg <= N )
  150. return;
  151. get_allocator().deallocate( allocator_pointer(where), capacity_arg );
  152. }
  153. template< class I >
  154. static void copy_impl( I begin, I end, pointer where, std::random_access_iterator_tag )
  155. {
  156. copy_rai( begin, end, where, boost::has_trivial_assign<T>() );
  157. }
  158. static void copy_rai( const T* begin, const T* end,
  159. pointer where, const boost::true_type& )
  160. {
  161. std::memcpy( where, begin, sizeof(T) * std::distance(begin,end) );
  162. }
  163. template< class I, bool b >
  164. static void copy_rai( I begin, I end,
  165. pointer where, const boost::integral_constant<bool, b>& )
  166. {
  167. std::uninitialized_copy( begin, end, where );
  168. }
  169. template< class I >
  170. static void copy_impl( I begin, I end, pointer where, std::bidirectional_iterator_tag )
  171. {
  172. std::uninitialized_copy( begin, end, where );
  173. }
  174. template< class I >
  175. static void copy_impl( I begin, I end, pointer where )
  176. {
  177. copy_impl( begin, end, where,
  178. typename std::iterator_traits<I>::iterator_category() );
  179. }
  180. template< class I, class I2 >
  181. static void assign_impl( I begin, I end, I2 where )
  182. {
  183. assign_impl( begin, end, where, boost::has_trivial_assign<T>() );
  184. }
  185. template< class I, class I2 >
  186. static void assign_impl( I begin, I end, I2 where, const boost::true_type& )
  187. {
  188. std::memcpy( where, begin, sizeof(T) * std::distance(begin,end) );
  189. }
  190. template< class I, class I2 >
  191. static void assign_impl( I begin, I end, I2 where, const boost::false_type& )
  192. {
  193. for( ; begin != end; ++begin, ++where )
  194. *where = *begin;
  195. }
  196. void unchecked_push_back_n( size_type n, const boost::true_type& )
  197. {
  198. std::uninitialized_fill( end(), end() + n, T() );
  199. size_ += n;
  200. }
  201. void unchecked_push_back_n( size_type n, const boost::false_type& )
  202. {
  203. for( size_type i = 0u; i < n; ++i )
  204. unchecked_push_back();
  205. }
  206. void auto_buffer_destroy( pointer where, const boost::false_type& )
  207. {
  208. (*where).~T();
  209. }
  210. void auto_buffer_destroy( pointer, const boost::true_type& )
  211. { }
  212. void auto_buffer_destroy( pointer where )
  213. {
  214. auto_buffer_destroy( where, boost::has_trivial_destructor<T>() );
  215. }
  216. void destroy_back_n( size_type n, const boost::false_type& )
  217. {
  218. BOOST_ASSERT( n > 0 );
  219. pointer buffer = buffer_ + size_ - 1u;
  220. pointer new_end = buffer - n;
  221. for( ; buffer > new_end; --buffer )
  222. auto_buffer_destroy( buffer );
  223. }
  224. void destroy_back_n( size_type n, const boost::true_type& )
  225. { }
  226. void destroy_back_n( size_type n )
  227. {
  228. destroy_back_n( n, boost::has_trivial_destructor<T>() );
  229. }
  230. void auto_buffer_destroy( const boost::false_type& x )
  231. {
  232. if( size_ )
  233. destroy_back_n( size_, x );
  234. deallocate( buffer_, members_.capacity_ );
  235. }
  236. void auto_buffer_destroy( const boost::true_type& )
  237. {
  238. deallocate( buffer_, members_.capacity_ );
  239. }
  240. pointer move_to_new_buffer( size_type new_capacity, const boost::false_type& )
  241. {
  242. pointer new_buffer = allocate( new_capacity ); // strong
  243. boost::multi_index::detail::scope_guard guard =
  244. boost::multi_index::detail::make_obj_guard( *this,
  245. &auto_buffer::deallocate,
  246. new_buffer,
  247. new_capacity );
  248. copy_impl( begin(), end(), new_buffer ); // strong
  249. guard.dismiss(); // nothrow
  250. return new_buffer;
  251. }
  252. pointer move_to_new_buffer( size_type new_capacity, const boost::true_type& )
  253. {
  254. pointer new_buffer = allocate( new_capacity ); // strong
  255. copy_impl( begin(), end(), new_buffer ); // nothrow
  256. return new_buffer;
  257. }
  258. void reserve_impl( size_type new_capacity )
  259. {
  260. pointer new_buffer = move_to_new_buffer( new_capacity,
  261. boost::has_nothrow_copy<T>() );
  262. (*this).~auto_buffer();
  263. buffer_ = new_buffer;
  264. members_.capacity_ = new_capacity;
  265. BOOST_ASSERT( size_ <= members_.capacity_ );
  266. }
  267. size_type new_capacity_impl( size_type n )
  268. {
  269. BOOST_ASSERT( n > members_.capacity_ );
  270. size_type new_capacity = GrowPolicy::new_capacity( members_.capacity_ );
  271. // @todo: consider to check for allocator.max_size()
  272. return (std::max)(new_capacity,n);
  273. }
  274. static void swap_helper( auto_buffer& l, auto_buffer& r,
  275. const boost::true_type& )
  276. {
  277. BOOST_ASSERT( l.is_on_stack() && r.is_on_stack() );
  278. auto_buffer temp( l.begin(), l.end() );
  279. assign_impl( r.begin(), r.end(), l.begin() );
  280. assign_impl( temp.begin(), temp.end(), r.begin() );
  281. boost::swap( l.size_, r.size_ );
  282. boost::swap( l.members_.capacity_, r.members_.capacity_ );
  283. }
  284. static void swap_helper( auto_buffer& l, auto_buffer& r,
  285. const boost::false_type& )
  286. {
  287. BOOST_ASSERT( l.is_on_stack() && r.is_on_stack() );
  288. size_type min_size = (std::min)(l.size_,r.size_);
  289. size_type max_size = (std::max)(l.size_,r.size_);
  290. size_type diff = max_size - min_size;
  291. auto_buffer* smallest = l.size_ == min_size ? &l : &r;
  292. auto_buffer* largest = smallest == &l ? &r : &l;
  293. // @remark: the implementation below is not as fast
  294. // as it could be if we assumed T had a default
  295. // constructor.
  296. size_type i = 0u;
  297. for( ; i < min_size; ++i )
  298. boost::swap( (*smallest)[i], (*largest)[i] );
  299. for( ; i < max_size; ++i )
  300. smallest->unchecked_push_back( (*largest)[i] );
  301. largest->pop_back_n( diff );
  302. boost::swap( l.members_.capacity_, r.members_.capacity_ );
  303. }
  304. void one_sided_swap( auto_buffer& temp ) // nothrow
  305. {
  306. BOOST_ASSERT( !temp.is_on_stack() );
  307. this->~auto_buffer();
  308. // @remark: must be nothrow
  309. get_allocator() = temp.get_allocator();
  310. members_.capacity_ = temp.members_.capacity_;
  311. buffer_ = temp.buffer_;
  312. BOOST_ASSERT( temp.size_ >= size_ + 1u );
  313. size_ = temp.size_;
  314. temp.buffer_ = 0;
  315. BOOST_ASSERT( temp.is_valid() );
  316. }
  317. template< class I >
  318. void insert_impl( const_iterator before, I begin_arg, I end_arg,
  319. std::input_iterator_tag )
  320. {
  321. for( ; begin_arg != end_arg; ++begin_arg )
  322. {
  323. before = insert( before, *begin_arg );
  324. ++before;
  325. }
  326. }
  327. void grow_back( size_type n, const boost::true_type& )
  328. {
  329. BOOST_ASSERT( size_ + n <= members_.capacity_ );
  330. size_ += n;
  331. }
  332. void grow_back( size_type n, const boost::false_type& )
  333. {
  334. unchecked_push_back_n(n);
  335. }
  336. void grow_back( size_type n )
  337. {
  338. grow_back( n, boost::has_trivial_constructor<T>() );
  339. }
  340. void grow_back_one( const boost::true_type& )
  341. {
  342. BOOST_ASSERT( size_ + 1 <= members_.capacity_ );
  343. size_ += 1;
  344. }
  345. void grow_back_one( const boost::false_type& )
  346. {
  347. unchecked_push_back();
  348. }
  349. void grow_back_one()
  350. {
  351. grow_back_one( boost::has_trivial_constructor<T>() );
  352. }
  353. template< class I >
  354. void insert_impl( const_iterator before, I begin_arg, I end_arg,
  355. std::forward_iterator_tag )
  356. {
  357. difference_type n = std::distance(begin_arg, end_arg);
  358. if( size_ + n <= members_.capacity_ )
  359. {
  360. bool is_back_insertion = before == cend();
  361. if( !is_back_insertion )
  362. {
  363. grow_back( n );
  364. iterator where = const_cast<T*>(before);
  365. std::copy( before, cend() - n, where + n );
  366. assign_impl( begin_arg, end_arg, where );
  367. }
  368. else
  369. {
  370. unchecked_push_back( begin_arg, end_arg );
  371. }
  372. BOOST_ASSERT( is_valid() );
  373. return;
  374. }
  375. auto_buffer temp( new_capacity_impl( size_ + n ) );
  376. temp.unchecked_push_back( cbegin(), before );
  377. temp.unchecked_push_back( begin_arg, end_arg );
  378. temp.unchecked_push_back( before, cend() );
  379. one_sided_swap( temp );
  380. BOOST_ASSERT( is_valid() );
  381. }
  382. public:
  383. bool is_valid() const // invariant
  384. {
  385. // @remark: allowed for N==0 and when
  386. // using a locally instance
  387. // in insert()/one_sided_swap()
  388. if( buffer_ == 0 )
  389. return true;
  390. if( members_.capacity_ < N )
  391. return false;
  392. if( !is_on_stack() && members_.capacity_ <= N )
  393. return false;
  394. if( buffer_ == members_.address() )
  395. if( members_.capacity_ > N )
  396. return false;
  397. if( size_ > members_.capacity_ )
  398. return false;
  399. return true;
  400. }
  401. auto_buffer()
  402. : members_( N ),
  403. buffer_( static_cast<T*>(members_.address()) ),
  404. size_( 0u )
  405. {
  406. BOOST_ASSERT( is_valid() );
  407. }
  408. auto_buffer( const auto_buffer& r )
  409. : members_( (std::max)(r.size_,size_type(N)) ),
  410. buffer_( allocate( members_.capacity_ ) ),
  411. size_( 0 )
  412. {
  413. copy_impl( r.begin(), r.end(), buffer_ );
  414. size_ = r.size_;
  415. BOOST_ASSERT( is_valid() );
  416. }
  417. auto_buffer& operator=( const auto_buffer& r ) // basic
  418. {
  419. if( this == &r )
  420. return *this;
  421. difference_type diff = size_ - r.size_;
  422. if( diff >= 0 )
  423. {
  424. pop_back_n( static_cast<size_type>(diff) );
  425. assign_impl( r.begin(), r.end(), begin() );
  426. }
  427. else
  428. {
  429. if( members_.capacity_ >= r.size() )
  430. {
  431. unchecked_push_back_n( static_cast<size_type>(-diff) );
  432. assign_impl( r.begin(), r.end(), begin() );
  433. }
  434. else
  435. {
  436. // @remark: we release memory as early as possible
  437. // since we only give the basic guarantee
  438. (*this).~auto_buffer();
  439. buffer_ = 0;
  440. pointer new_buffer = allocate( r.size() );
  441. boost::multi_index::detail::scope_guard guard =
  442. boost::multi_index::detail::make_obj_guard( *this,
  443. &auto_buffer::deallocate,
  444. new_buffer,
  445. r.size() );
  446. copy_impl( r.begin(), r.end(), new_buffer );
  447. guard.dismiss();
  448. buffer_ = new_buffer;
  449. members_.capacity_ = r.size();
  450. size_ = members_.capacity_;
  451. }
  452. }
  453. BOOST_ASSERT( size() == r.size() );
  454. BOOST_ASSERT( is_valid() );
  455. return *this;
  456. }
  457. explicit auto_buffer( size_type capacity_arg )
  458. : members_( (std::max)(capacity_arg, size_type(N)) ),
  459. buffer_( allocate(members_.capacity_) ),
  460. size_( 0 )
  461. {
  462. BOOST_ASSERT( is_valid() );
  463. }
  464. auto_buffer( size_type size_arg, optimized_const_reference init_value )
  465. : members_( (std::max)(size_arg, size_type(N)) ),
  466. buffer_( allocate(members_.capacity_) ),
  467. size_( 0 )
  468. {
  469. std::uninitialized_fill( buffer_, buffer_ + size_arg, init_value );
  470. size_ = size_arg;
  471. BOOST_ASSERT( is_valid() );
  472. }
  473. auto_buffer( size_type capacity_arg, const allocator_type& a )
  474. : allocator_type( a ),
  475. members_( (std::max)(capacity_arg, size_type(N)) ),
  476. buffer_( allocate(members_.capacity_) ),
  477. size_( 0 )
  478. {
  479. BOOST_ASSERT( is_valid() );
  480. }
  481. auto_buffer( size_type size_arg, optimized_const_reference init_value,
  482. const allocator_type& a )
  483. : allocator_type( a ),
  484. members_( (std::max)(size_arg, size_type(N)) ),
  485. buffer_( allocate(members_.capacity_) ),
  486. size_( 0 )
  487. {
  488. std::uninitialized_fill( buffer_, buffer_ + size_arg, init_value );
  489. size_ = size_arg;
  490. BOOST_ASSERT( is_valid() );
  491. }
  492. template< class ForwardIterator >
  493. auto_buffer( ForwardIterator begin_arg, ForwardIterator end_arg )
  494. :
  495. members_( std::distance(begin_arg, end_arg) ),
  496. buffer_( allocate(members_.capacity_) ),
  497. size_( 0 )
  498. {
  499. copy_impl( begin_arg, end_arg, buffer_ );
  500. size_ = members_.capacity_;
  501. if( members_.capacity_ < N )
  502. members_.capacity_ = N;
  503. BOOST_ASSERT( is_valid() );
  504. }
  505. template< class ForwardIterator >
  506. auto_buffer( ForwardIterator begin_arg, ForwardIterator end_arg,
  507. const allocator_type& a )
  508. : allocator_type( a ),
  509. members_( std::distance(begin_arg, end_arg) ),
  510. buffer_( allocate(members_.capacity_) ),
  511. size_( 0 )
  512. {
  513. copy_impl( begin_arg, end_arg, buffer_ );
  514. size_ = members_.capacity_;
  515. if( members_.capacity_ < N )
  516. members_.capacity_ = N;
  517. BOOST_ASSERT( is_valid() );
  518. }
  519. ~auto_buffer()
  520. {
  521. BOOST_ASSERT( is_valid() );
  522. if( buffer_ ) // do we need this check? Yes, but only
  523. // for N = 0u + local instances in one_sided_swap()
  524. auto_buffer_destroy( boost::has_trivial_destructor<T>() );
  525. }
  526. public:
  527. bool empty() const
  528. {
  529. return size_ == 0;
  530. }
  531. bool full() const
  532. {
  533. return size_ == members_.capacity_;
  534. }
  535. bool is_on_stack() const
  536. {
  537. return members_.capacity_ <= N;
  538. }
  539. size_type size() const
  540. {
  541. return size_;
  542. }
  543. size_type capacity() const
  544. {
  545. return members_.capacity_;
  546. }
  547. public:
  548. pointer data()
  549. {
  550. return buffer_;
  551. }
  552. const_pointer data() const
  553. {
  554. return buffer_;
  555. }
  556. allocator_type& get_allocator()
  557. {
  558. return static_cast<allocator_type&>(*this);
  559. }
  560. const allocator_type& get_allocator() const
  561. {
  562. return static_cast<const allocator_type&>(*this);
  563. }
  564. public:
  565. iterator begin()
  566. {
  567. return buffer_;
  568. }
  569. const_iterator begin() const
  570. {
  571. return buffer_;
  572. }
  573. iterator end()
  574. {
  575. return buffer_ + size_;
  576. }
  577. const_iterator end() const
  578. {
  579. return buffer_ + size_;
  580. }
  581. reverse_iterator rbegin()
  582. {
  583. return reverse_iterator(end());
  584. }
  585. const_reverse_iterator rbegin() const
  586. {
  587. return const_reverse_iterator(end());
  588. }
  589. reverse_iterator rend()
  590. {
  591. return reverse_iterator(begin());
  592. }
  593. const_reverse_iterator rend() const
  594. {
  595. return const_reverse_iterator(begin());
  596. }
  597. const_iterator cbegin() const
  598. {
  599. return const_cast<const auto_buffer*>(this)->begin();
  600. }
  601. const_iterator cend() const
  602. {
  603. return const_cast<const auto_buffer*>(this)->end();
  604. }
  605. const_reverse_iterator crbegin() const
  606. {
  607. return const_cast<const auto_buffer*>(this)->rbegin();
  608. }
  609. const_reverse_iterator crend() const
  610. {
  611. return const_cast<const auto_buffer*>(this)->rend();
  612. }
  613. public:
  614. reference front()
  615. {
  616. return buffer_[0];
  617. }
  618. optimized_const_reference front() const
  619. {
  620. return buffer_[0];
  621. }
  622. reference back()
  623. {
  624. return buffer_[size_-1];
  625. }
  626. optimized_const_reference back() const
  627. {
  628. return buffer_[size_-1];
  629. }
  630. reference operator[]( size_type n )
  631. {
  632. BOOST_ASSERT( n < size_ );
  633. return buffer_[n];
  634. }
  635. optimized_const_reference operator[]( size_type n ) const
  636. {
  637. BOOST_ASSERT( n < size_ );
  638. return buffer_[n];
  639. }
  640. void unchecked_push_back()
  641. {
  642. BOOST_ASSERT( !full() );
  643. new (buffer_ + size_) T;
  644. ++size_;
  645. }
  646. void unchecked_push_back_n( size_type n )
  647. {
  648. BOOST_ASSERT( size_ + n <= members_.capacity_ );
  649. unchecked_push_back_n( n, boost::has_trivial_assign<T>() );
  650. }
  651. void unchecked_push_back( optimized_const_reference x ) // non-growing
  652. {
  653. BOOST_ASSERT( !full() );
  654. new (buffer_ + size_) T( x );
  655. ++size_;
  656. }
  657. template< class ForwardIterator >
  658. void unchecked_push_back( ForwardIterator begin_arg,
  659. ForwardIterator end_arg ) // non-growing
  660. {
  661. BOOST_ASSERT( size_ + std::distance(begin_arg, end_arg) <= members_.capacity_ );
  662. copy_impl( begin_arg, end_arg, buffer_ + size_ );
  663. size_ += std::distance(begin_arg, end_arg);
  664. }
  665. void reserve_precisely( size_type n )
  666. {
  667. BOOST_ASSERT( members_.capacity_ >= N );
  668. if( n <= members_.capacity_ )
  669. return;
  670. reserve_impl( n );
  671. BOOST_ASSERT( members_.capacity_ == n );
  672. }
  673. void reserve( size_type n ) // strong
  674. {
  675. BOOST_ASSERT( members_.capacity_ >= N );
  676. if( n <= members_.capacity_ )
  677. return;
  678. reserve_impl( new_capacity_impl( n ) );
  679. BOOST_ASSERT( members_.capacity_ >= n );
  680. }
  681. void push_back()
  682. {
  683. if( size_ != members_.capacity_ )
  684. {
  685. unchecked_push_back();
  686. }
  687. else
  688. {
  689. reserve( size_ + 1u );
  690. unchecked_push_back();
  691. }
  692. }
  693. void push_back( optimized_const_reference x )
  694. {
  695. if( size_ != members_.capacity_ )
  696. {
  697. unchecked_push_back( x );
  698. }
  699. else
  700. {
  701. reserve( size_ + 1u );
  702. unchecked_push_back( x );
  703. }
  704. }
  705. template< class ForwardIterator >
  706. void push_back( ForwardIterator begin_arg, ForwardIterator end_arg )
  707. {
  708. difference_type diff = std::distance(begin_arg, end_arg);
  709. if( size_ + diff > members_.capacity_ )
  710. reserve( size_ + diff );
  711. unchecked_push_back( begin_arg, end_arg );
  712. }
  713. iterator insert( const_iterator before, optimized_const_reference x ) // basic
  714. {
  715. // @todo: consider if we want to support x in 'this'
  716. if( size_ < members_.capacity_ )
  717. {
  718. bool is_back_insertion = before == cend();
  719. iterator where = const_cast<T*>(before);
  720. if( !is_back_insertion )
  721. {
  722. grow_back_one();
  723. std::copy( before, cend() - 1u, where + 1u );
  724. *where = x;
  725. BOOST_ASSERT( is_valid() );
  726. }
  727. else
  728. {
  729. unchecked_push_back( x );
  730. }
  731. return where;
  732. }
  733. auto_buffer temp( new_capacity_impl( size_ + 1u ) );
  734. temp.unchecked_push_back( cbegin(), before );
  735. iterator result = temp.end();
  736. temp.unchecked_push_back( x );
  737. temp.unchecked_push_back( before, cend() );
  738. one_sided_swap( temp );
  739. BOOST_ASSERT( is_valid() );
  740. return result;
  741. }
  742. void insert( const_iterator before, size_type n,
  743. optimized_const_reference x )
  744. {
  745. // @todo: see problems above
  746. if( size_ + n <= members_.capacity_ )
  747. {
  748. grow_back( n );
  749. iterator where = const_cast<T*>(before);
  750. std::copy( before, cend() - n, where + n );
  751. std::fill( where, where + n, x );
  752. BOOST_ASSERT( is_valid() );
  753. return;
  754. }
  755. auto_buffer temp( new_capacity_impl( size_ + n ) );
  756. temp.unchecked_push_back( cbegin(), before );
  757. std::uninitialized_fill_n( temp.end(), n, x );
  758. temp.size_ += n;
  759. temp.unchecked_push_back( before, cend() );
  760. one_sided_swap( temp );
  761. BOOST_ASSERT( is_valid() );
  762. }
  763. template< class ForwardIterator >
  764. void insert( const_iterator before,
  765. ForwardIterator begin_arg, ForwardIterator end_arg ) // basic
  766. {
  767. typedef typename std::iterator_traits<ForwardIterator>
  768. ::iterator_category category;
  769. insert_impl( before, begin_arg, end_arg, category() );
  770. }
  771. void pop_back()
  772. {
  773. BOOST_ASSERT( !empty() );
  774. auto_buffer_destroy( buffer_ + size_ - 1, boost::has_trivial_destructor<T>() );
  775. --size_;
  776. }
  777. void pop_back_n( size_type n )
  778. {
  779. BOOST_ASSERT( n <= size_ );
  780. if( n )
  781. {
  782. destroy_back_n( n );
  783. size_ -= n;
  784. }
  785. }
  786. void clear()
  787. {
  788. pop_back_n( size_ );
  789. }
  790. iterator erase( const_iterator where )
  791. {
  792. BOOST_ASSERT( !empty() );
  793. BOOST_ASSERT( cbegin() <= where );
  794. BOOST_ASSERT( cend() > where );
  795. unsigned elements = cend() - where - 1u;
  796. if( elements > 0u )
  797. {
  798. const_iterator start = where + 1u;
  799. std::copy( start, start + elements,
  800. const_cast<T*>(where) );
  801. }
  802. pop_back();
  803. BOOST_ASSERT( !full() );
  804. iterator result = const_cast<T*>( where );
  805. BOOST_ASSERT( result <= end() );
  806. return result;
  807. }
  808. iterator erase( const_iterator from, const_iterator to )
  809. {
  810. BOOST_ASSERT( !(std::distance(from,to)>0) ||
  811. !empty() );
  812. BOOST_ASSERT( cbegin() <= from );
  813. BOOST_ASSERT( cend() >= to );
  814. unsigned elements = std::distance(to,cend());
  815. if( elements > 0u )
  816. {
  817. BOOST_ASSERT( elements > 0u );
  818. std::copy( to, to + elements,
  819. const_cast<T*>(from) );
  820. }
  821. pop_back_n( std::distance(from,to) );
  822. BOOST_ASSERT( !full() );
  823. iterator result = const_cast<T*>( from );
  824. BOOST_ASSERT( result <= end() );
  825. return result;
  826. }
  827. void shrink_to_fit()
  828. {
  829. if( is_on_stack() || !GrowPolicy::should_shrink(size_,members_.capacity_) )
  830. return;
  831. reserve_impl( size_ );
  832. members_.capacity_ = (std::max)(size_type(N),members_.capacity_);
  833. BOOST_ASSERT( is_on_stack() || size_ == members_.capacity_ );
  834. BOOST_ASSERT( !is_on_stack() || size_ <= members_.capacity_ );
  835. }
  836. pointer uninitialized_grow( size_type n ) // strong
  837. {
  838. if( size_ + n <= members_.capacity_ )
  839. reserve( size_ + n );
  840. pointer res = end();
  841. size_ += n;
  842. return res;
  843. }
  844. void uninitialized_shrink( size_type n ) // nothrow
  845. {
  846. // @remark: test for wrap-around
  847. BOOST_ASSERT( size_ - n <= members_.capacity_ );
  848. size_ -= n;
  849. }
  850. void uninitialized_resize( size_type n )
  851. {
  852. if( n > size() )
  853. uninitialized_grow( n - size() );
  854. else if( n < size() )
  855. uninitialized_shrink( size() - n );
  856. BOOST_ASSERT( size() == n );
  857. }
  858. // nothrow - if both buffer are on the heap, or
  859. // - if one buffer is on the heap and one has
  860. // 'has_allocated_buffer() == false', or
  861. // - if copy-construction cannot throw
  862. // basic - otherwise (better guarantee impossible)
  863. // requirement: the allocator must be no-throw-swappable
  864. void swap( auto_buffer& r )
  865. {
  866. bool on_stack = is_on_stack();
  867. bool r_on_stack = r.is_on_stack();
  868. bool both_on_heap = !on_stack && !r_on_stack;
  869. if( both_on_heap )
  870. {
  871. boost::swap( get_allocator(), r.get_allocator() );
  872. boost::swap( members_.capacity_, r.members_.capacity_ );
  873. boost::swap( buffer_, r.buffer_ );
  874. boost::swap( size_, r.size_ );
  875. BOOST_ASSERT( is_valid() );
  876. BOOST_ASSERT( r.is_valid() );
  877. return;
  878. }
  879. BOOST_ASSERT( on_stack || r_on_stack );
  880. bool exactly_one_on_stack = (on_stack && !r_on_stack) ||
  881. (!on_stack && r_on_stack);
  882. //
  883. // Remark: we now know that we can copy into
  884. // the unused stack buffer.
  885. //
  886. if( exactly_one_on_stack )
  887. {
  888. auto_buffer* one_on_stack = on_stack ? this : &r;
  889. auto_buffer* other = on_stack ? &r : this;
  890. pointer new_buffer = static_cast<T*>(other->members_.address());
  891. copy_impl( one_on_stack->begin(), one_on_stack->end(),
  892. new_buffer ); // strong
  893. one_on_stack->~auto_buffer(); // nothrow
  894. boost::swap( get_allocator(), r.get_allocator() ); // assume nothrow
  895. boost::swap( members_.capacity_, r.members_.capacity_ );
  896. boost::swap( size_, r.size_ );
  897. one_on_stack->buffer_ = other->buffer_;
  898. other->buffer_ = new_buffer;
  899. BOOST_ASSERT( other->is_on_stack() );
  900. BOOST_ASSERT( !one_on_stack->is_on_stack() );
  901. BOOST_ASSERT( is_valid() );
  902. BOOST_ASSERT( r.is_valid() );
  903. return;
  904. }
  905. BOOST_ASSERT( on_stack && r_on_stack );
  906. swap_helper( *this, r, boost::has_trivial_assign<T>() );
  907. BOOST_ASSERT( is_valid() );
  908. BOOST_ASSERT( r.is_valid() );
  909. }
  910. private:
  911. typedef boost::aligned_storage< N * sizeof(T),
  912. boost::alignment_of<T>::value >
  913. storage;
  914. struct members_type : storage /* to enable EBO */
  915. {
  916. size_type capacity_;
  917. members_type( size_type capacity )
  918. : capacity_(capacity)
  919. { }
  920. void* address() const
  921. { return const_cast<storage&>(static_cast<const storage&>(*this)).address(); }
  922. };
  923. members_type members_;
  924. pointer buffer_;
  925. size_type size_;
  926. };
// Non-member swap (found via ADL); forwards to the member swap, which
// carries the guarantees documented there.
template< class T, class SBP, class GP, class A >
inline void swap( auto_buffer<T,SBP,GP,A>& l, auto_buffer<T,SBP,GP,A>& r )
{
    l.swap( r );
}
  932. template< class T, class SBP, class GP, class A >
  933. inline bool operator==( const auto_buffer<T,SBP,GP,A>& l,
  934. const auto_buffer<T,SBP,GP,A>& r )
  935. {
  936. if( l.size() != r.size() )
  937. return false;
  938. return std::equal( l.begin(), l.end(), r.begin() );
  939. }
// Defined in terms of operator==.
template< class T, class SBP, class GP, class A >
inline bool operator!=( const auto_buffer<T,SBP,GP,A>& l,
                        const auto_buffer<T,SBP,GP,A>& r )
{
    return !(l == r);
}
// Lexicographical (element-wise) ordering.
template< class T, class SBP, class GP, class A >
inline bool operator<( const auto_buffer<T,SBP,GP,A>& l,
                       const auto_buffer<T,SBP,GP,A>& r )
{
    return std::lexicographical_compare( l.begin(), l.end(),
                                         r.begin(), r.end() );
}
// Defined in terms of operator<.
template< class T, class SBP, class GP, class A >
inline bool operator>( const auto_buffer<T,SBP,GP,A>& l,
                       const auto_buffer<T,SBP,GP,A>& r )
{
    return (r < l);
}
// Defined in terms of operator>.
template< class T, class SBP, class GP, class A >
inline bool operator<=( const auto_buffer<T,SBP,GP,A>& l,
                        const auto_buffer<T,SBP,GP,A>& r )
{
    return !(r > l);
}
// Defined in terms of operator<.
template< class T, class SBP, class GP, class A >
inline bool operator>=( const auto_buffer<T,SBP,GP,A>& l,
                        const auto_buffer<T,SBP,GP,A>& r )
{
    return !(l < r);
}
  971. } // namespace detail
  972. } // namespace signals2
  973. }
  974. #if BOOST_WORKAROUND(BOOST_MSVC, >= 1400)
  975. #pragma warning(pop)
  976. #endif
  977. #endif