// boost/signals2/detail/auto_buffer.hpp
// Copyright Thorsten Ottosen, 2009.
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)

#ifndef BOOST_SIGNALS2_DETAIL_AUTO_BUFFER_HPP_25_02_2009
#define BOOST_SIGNALS2_DETAIL_AUTO_BUFFER_HPP_25_02_2009

#include <boost/detail/workaround.hpp>

#if defined(_MSC_VER)
# pragma once
#endif

#if BOOST_WORKAROUND(BOOST_MSVC, >= 1400)
#pragma warning(push)
#pragma warning(disable:4996)
#endif
#include <boost/assert.hpp>
#include <boost/iterator/reverse_iterator.hpp>
#include <boost/iterator/iterator_traits.hpp>
#include <boost/mpl/if.hpp>
#include <boost/multi_index/detail/scope_guard.hpp>
#include <boost/swap.hpp>
#include <boost/type_traits/aligned_storage.hpp>
#include <boost/type_traits/alignment_of.hpp>
#include <boost/type_traits/has_nothrow_copy.hpp>
#include <boost/type_traits/has_nothrow_assign.hpp>
#include <boost/type_traits/has_trivial_assign.hpp>
#include <boost/type_traits/has_trivial_constructor.hpp>
#include <boost/type_traits/has_trivial_destructor.hpp>

#include <algorithm>
#include <cstring>
#include <iterator>
#include <memory>
#include <stdexcept>
namespace boost
{
namespace signals2
{
namespace detail
{
//
// Policies for creating the stack buffer.
//
  42. template< unsigned N >
  43. struct store_n_objects
  44. {
  45. BOOST_STATIC_CONSTANT( unsigned, value = N );
  46. };
  47. template< unsigned N >
  48. struct store_n_bytes
  49. {
  50. BOOST_STATIC_CONSTANT( unsigned, value = N );
  51. };
  52. namespace auto_buffer_detail
  53. {
  54. template< class Policy, class T >
  55. struct compute_buffer_size
  56. {
  57. BOOST_STATIC_CONSTANT( unsigned, value = Policy::value * sizeof(T) );
  58. };
  59. template< unsigned N, class T >
  60. struct compute_buffer_size< store_n_bytes<N>, T >
  61. {
  62. BOOST_STATIC_CONSTANT( unsigned, value = N );
  63. };
  64. template< class Policy, class T >
  65. struct compute_buffer_objects
  66. {
  67. BOOST_STATIC_CONSTANT( unsigned, value = Policy::value );
  68. };
  69. template< unsigned N, class T >
  70. struct compute_buffer_objects< store_n_bytes<N>, T >
  71. {
  72. BOOST_STATIC_CONSTANT( unsigned, value = N / sizeof(T) );
  73. };
  74. }
  75. struct default_grow_policy
  76. {
  77. template< class SizeType >
  78. static SizeType new_capacity( SizeType capacity )
  79. {
  80. //
  81. // @remark: we grow the capacity quite agressively.
  82. // this is justified since we aim to minimize
  83. // heap-allocations, and because we mostly use
  84. // the buffer locally.
  85. return capacity * 4u;
  86. }
  87. template< class SizeType >
  88. static bool should_shrink( SizeType, SizeType )
  89. {
  90. //
  91. // @remark: when defining a new grow policy, one might
  92. // choose that if the waated space is less
  93. // than a certain percentage, then it is of
  94. // little use to shrink.
  95. //
  96. return true;
  97. }
  98. };
  99. template< class T,
  100. class StackBufferPolicy = store_n_objects<256>,
  101. class GrowPolicy = default_grow_policy,
  102. class Allocator = std::allocator<T> >
  103. class auto_buffer;
  104. template
  105. <
  106. class T,
  107. class StackBufferPolicy,
  108. class GrowPolicy,
  109. class Allocator
  110. >
  111. class auto_buffer : Allocator
  112. {
  113. private:
  114. enum { N = auto_buffer_detail::
  115. compute_buffer_objects<StackBufferPolicy,T>::value };
  116. BOOST_STATIC_CONSTANT( bool, is_stack_buffer_empty = N == 0u );
  117. typedef auto_buffer<T, store_n_objects<0>, GrowPolicy, Allocator>
  118. local_buffer;
  119. public:
  120. typedef Allocator allocator_type;
  121. typedef T value_type;
  122. typedef typename Allocator::size_type size_type;
  123. typedef typename Allocator::difference_type difference_type;
  124. typedef T* pointer;
  125. typedef typename Allocator::pointer allocator_pointer;
  126. typedef const T* const_pointer;
  127. typedef T& reference;
  128. typedef const T& const_reference;
  129. typedef pointer iterator;
  130. typedef const_pointer const_iterator;
  131. typedef boost::reverse_iterator<iterator> reverse_iterator;
  132. typedef boost::reverse_iterator<const_iterator> const_reverse_iterator;
  133. typedef typename boost::mpl::if_c< boost::has_trivial_assign<T>::value
  134. && sizeof(T) <= sizeof(long double),
  135. const value_type,
  136. const_reference >::type
  137. optimized_const_reference;
  138. private:
  139. pointer allocate( size_type capacity_arg )
  140. {
  141. if( capacity_arg > N )
  142. return &*get_allocator().allocate( capacity_arg );
  143. else
  144. return static_cast<T*>( members_.address() );
  145. }
  146. void deallocate( pointer where, size_type capacity_arg )
  147. {
  148. if( capacity_arg <= N )
  149. return;
  150. get_allocator().deallocate( allocator_pointer(where), capacity_arg );
  151. }
  152. template< class I >
  153. static void copy_impl( I begin, I end, pointer where, std::random_access_iterator_tag )
  154. {
  155. copy_rai( begin, end, where, boost::has_trivial_assign<T>() );
  156. }
  157. static void copy_rai( const T* begin, const T* end,
  158. pointer where, const boost::true_type& )
  159. {
  160. std::memcpy( where, begin, sizeof(T) * std::distance(begin,end) );
  161. }
  162. template< class I, bool b >
  163. static void copy_rai( I begin, I end,
  164. pointer where, const boost::integral_constant<bool, b>& )
  165. {
  166. std::uninitialized_copy( begin, end, where );
  167. }
  168. template< class I >
  169. static void copy_impl( I begin, I end, pointer where, std::bidirectional_iterator_tag )
  170. {
  171. std::uninitialized_copy( begin, end, where );
  172. }
  173. template< class I >
  174. static void copy_impl( I begin, I end, pointer where )
  175. {
  176. copy_impl( begin, end, where,
  177. typename std::iterator_traits<I>::iterator_category() );
  178. }
  179. template< class I, class I2 >
  180. static void assign_impl( I begin, I end, I2 where )
  181. {
  182. assign_impl( begin, end, where, boost::has_trivial_assign<T>() );
  183. }
  184. template< class I, class I2 >
  185. static void assign_impl( I begin, I end, I2 where, const boost::true_type& )
  186. {
  187. std::memcpy( where, begin, sizeof(T) * std::distance(begin,end) );
  188. }
  189. template< class I, class I2 >
  190. static void assign_impl( I begin, I end, I2 where, const boost::false_type& )
  191. {
  192. for( ; begin != end; ++begin, ++where )
  193. *where = *begin;
  194. }
  195. void unchecked_push_back_n( size_type n, const boost::true_type& )
  196. {
  197. std::uninitialized_fill( end(), end() + n, T() );
  198. size_ += n;
  199. }
  200. void unchecked_push_back_n( size_type n, const boost::false_type& )
  201. {
  202. for( size_type i = 0u; i < n; ++i )
  203. unchecked_push_back();
  204. }
  205. void auto_buffer_destroy( pointer where, const boost::false_type& )
  206. {
  207. (*where).~T();
  208. }
  209. void auto_buffer_destroy( pointer, const boost::true_type& )
  210. { }
  211. void auto_buffer_destroy( pointer where )
  212. {
  213. auto_buffer_destroy( where, boost::has_trivial_destructor<T>() );
  214. }
  215. void auto_buffer_destroy()
  216. {
  217. BOOST_ASSERT( is_valid() );
  218. if( buffer_ ) // do we need this check? Yes, but only
  219. // for N = 0u + local instances in one_sided_swap()
  220. auto_buffer_destroy( boost::has_trivial_destructor<T>() );
  221. }
  222. void destroy_back_n( size_type n, const boost::false_type& )
  223. {
  224. BOOST_ASSERT( n > 0 );
  225. pointer buffer = buffer_ + size_ - 1u;
  226. pointer new_end = buffer - n;
  227. for( ; buffer > new_end; --buffer )
  228. auto_buffer_destroy( buffer );
  229. }
  230. void destroy_back_n( size_type, const boost::true_type& )
  231. { }
  232. void destroy_back_n( size_type n )
  233. {
  234. destroy_back_n( n, boost::has_trivial_destructor<T>() );
  235. }
  236. void auto_buffer_destroy( const boost::false_type& x )
  237. {
  238. if( size_ )
  239. destroy_back_n( size_, x );
  240. deallocate( buffer_, members_.capacity_ );
  241. }
  242. void auto_buffer_destroy( const boost::true_type& )
  243. {
  244. deallocate( buffer_, members_.capacity_ );
  245. }
  246. pointer move_to_new_buffer( size_type new_capacity, const boost::false_type& )
  247. {
  248. pointer new_buffer = allocate( new_capacity ); // strong
  249. boost::multi_index::detail::scope_guard guard =
  250. boost::multi_index::detail::make_obj_guard( *this,
  251. &auto_buffer::deallocate,
  252. new_buffer,
  253. new_capacity );
  254. copy_impl( begin(), end(), new_buffer ); // strong
  255. guard.dismiss(); // nothrow
  256. return new_buffer;
  257. }
  258. pointer move_to_new_buffer( size_type new_capacity, const boost::true_type& )
  259. {
  260. pointer new_buffer = allocate( new_capacity ); // strong
  261. copy_impl( begin(), end(), new_buffer ); // nothrow
  262. return new_buffer;
  263. }
  264. void reserve_impl( size_type new_capacity )
  265. {
  266. pointer new_buffer = move_to_new_buffer( new_capacity,
  267. boost::has_nothrow_copy<T>() );
  268. auto_buffer_destroy();
  269. buffer_ = new_buffer;
  270. members_.capacity_ = new_capacity;
  271. BOOST_ASSERT( size_ <= members_.capacity_ );
  272. }
  273. size_type new_capacity_impl( size_type n )
  274. {
  275. BOOST_ASSERT( n > members_.capacity_ );
  276. size_type new_capacity = GrowPolicy::new_capacity( members_.capacity_ );
  277. // @todo: consider to check for allocator.max_size()
  278. return (std::max)(new_capacity,n);
  279. }
  280. static void swap_helper( auto_buffer& l, auto_buffer& r,
  281. const boost::true_type& )
  282. {
  283. BOOST_ASSERT( l.is_on_stack() && r.is_on_stack() );
  284. auto_buffer temp( l.begin(), l.end() );
  285. assign_impl( r.begin(), r.end(), l.begin() );
  286. assign_impl( temp.begin(), temp.end(), r.begin() );
  287. boost::swap( l.size_, r.size_ );
  288. boost::swap( l.members_.capacity_, r.members_.capacity_ );
  289. }
  290. static void swap_helper( auto_buffer& l, auto_buffer& r,
  291. const boost::false_type& )
  292. {
  293. BOOST_ASSERT( l.is_on_stack() && r.is_on_stack() );
  294. size_type min_size = (std::min)(l.size_,r.size_);
  295. size_type max_size = (std::max)(l.size_,r.size_);
  296. size_type diff = max_size - min_size;
  297. auto_buffer* smallest = l.size_ == min_size ? &l : &r;
  298. auto_buffer* largest = smallest == &l ? &r : &l;
  299. // @remark: the implementation below is not as fast
  300. // as it could be if we assumed T had a default
  301. // constructor.
  302. size_type i = 0u;
  303. for( ; i < min_size; ++i )
  304. boost::swap( (*smallest)[i], (*largest)[i] );
  305. for( ; i < max_size; ++i )
  306. smallest->unchecked_push_back( (*largest)[i] );
  307. largest->pop_back_n( diff );
  308. boost::swap( l.members_.capacity_, r.members_.capacity_ );
  309. }
  310. void one_sided_swap( auto_buffer& temp ) // nothrow
  311. {
  312. BOOST_ASSERT( !temp.is_on_stack() );
  313. auto_buffer_destroy();
  314. // @remark: must be nothrow
  315. get_allocator() = temp.get_allocator();
  316. members_.capacity_ = temp.members_.capacity_;
  317. buffer_ = temp.buffer_;
  318. BOOST_ASSERT( temp.size_ >= size_ + 1u );
  319. size_ = temp.size_;
  320. temp.buffer_ = 0;
  321. BOOST_ASSERT( temp.is_valid() );
  322. }
  323. template< class I >
  324. void insert_impl( const_iterator before, I begin_arg, I end_arg,
  325. std::input_iterator_tag )
  326. {
  327. for( ; begin_arg != end_arg; ++begin_arg )
  328. {
  329. before = insert( before, *begin_arg );
  330. ++before;
  331. }
  332. }
  333. void grow_back( size_type n, const boost::true_type& )
  334. {
  335. BOOST_ASSERT( size_ + n <= members_.capacity_ );
  336. size_ += n;
  337. }
  338. void grow_back( size_type n, const boost::false_type& )
  339. {
  340. unchecked_push_back_n(n);
  341. }
  342. void grow_back( size_type n )
  343. {
  344. grow_back( n, boost::has_trivial_constructor<T>() );
  345. }
  346. void grow_back_one( const boost::true_type& )
  347. {
  348. BOOST_ASSERT( size_ + 1 <= members_.capacity_ );
  349. size_ += 1;
  350. }
  351. void grow_back_one( const boost::false_type& )
  352. {
  353. unchecked_push_back();
  354. }
  355. void grow_back_one()
  356. {
  357. grow_back_one( boost::has_trivial_constructor<T>() );
  358. }
  359. template< class I >
  360. void insert_impl( const_iterator before, I begin_arg, I end_arg,
  361. std::forward_iterator_tag )
  362. {
  363. difference_type n = std::distance(begin_arg, end_arg);
  364. if( size_ + n <= members_.capacity_ )
  365. {
  366. bool is_back_insertion = before == cend();
  367. if( !is_back_insertion )
  368. {
  369. grow_back( n );
  370. iterator where = const_cast<T*>(before);
  371. std::copy( before, cend() - n, where + n );
  372. assign_impl( begin_arg, end_arg, where );
  373. }
  374. else
  375. {
  376. unchecked_push_back( begin_arg, end_arg );
  377. }
  378. BOOST_ASSERT( is_valid() );
  379. return;
  380. }
  381. auto_buffer temp( new_capacity_impl( size_ + n ) );
  382. temp.unchecked_push_back( cbegin(), before );
  383. temp.unchecked_push_back( begin_arg, end_arg );
  384. temp.unchecked_push_back( before, cend() );
  385. one_sided_swap( temp );
  386. BOOST_ASSERT( is_valid() );
  387. }
  388. public:
  389. bool is_valid() const // invariant
  390. {
  391. // @remark: allowed for N==0 and when
  392. // using a locally instance
  393. // in insert()/one_sided_swap()
  394. if( buffer_ == 0 )
  395. return true;
  396. if( members_.capacity_ < N )
  397. return false;
  398. if( !is_on_stack() && members_.capacity_ <= N )
  399. return false;
  400. if( buffer_ == members_.address() )
  401. if( members_.capacity_ > N )
  402. return false;
  403. if( size_ > members_.capacity_ )
  404. return false;
  405. return true;
  406. }
  407. auto_buffer()
  408. : members_( N ),
  409. buffer_( static_cast<T*>(members_.address()) ),
  410. size_( 0u )
  411. {
  412. BOOST_ASSERT( is_valid() );
  413. }
  414. auto_buffer( const auto_buffer& r )
  415. : members_( (std::max)(r.size_,size_type(N)) ),
  416. buffer_( allocate( members_.capacity_ ) ),
  417. size_( 0 )
  418. {
  419. copy_impl( r.begin(), r.end(), buffer_ );
  420. size_ = r.size_;
  421. BOOST_ASSERT( is_valid() );
  422. }
  423. auto_buffer& operator=( const auto_buffer& r ) // basic
  424. {
  425. if( this == &r )
  426. return *this;
  427. difference_type diff = size_ - r.size_;
  428. if( diff >= 0 )
  429. {
  430. pop_back_n( static_cast<size_type>(diff) );
  431. assign_impl( r.begin(), r.end(), begin() );
  432. }
  433. else
  434. {
  435. if( members_.capacity_ >= r.size() )
  436. {
  437. unchecked_push_back_n( static_cast<size_type>(-diff) );
  438. assign_impl( r.begin(), r.end(), begin() );
  439. }
  440. else
  441. {
  442. // @remark: we release memory as early as possible
  443. // since we only give the basic guarantee
  444. auto_buffer_destroy();
  445. buffer_ = 0;
  446. pointer new_buffer = allocate( r.size() );
  447. boost::multi_index::detail::scope_guard guard =
  448. boost::multi_index::detail::make_obj_guard( *this,
  449. &auto_buffer::deallocate,
  450. new_buffer,
  451. r.size() );
  452. copy_impl( r.begin(), r.end(), new_buffer );
  453. guard.dismiss();
  454. buffer_ = new_buffer;
  455. members_.capacity_ = r.size();
  456. size_ = members_.capacity_;
  457. }
  458. }
  459. BOOST_ASSERT( size() == r.size() );
  460. BOOST_ASSERT( is_valid() );
  461. return *this;
  462. }
  463. explicit auto_buffer( size_type capacity_arg )
  464. : members_( (std::max)(capacity_arg, size_type(N)) ),
  465. buffer_( allocate(members_.capacity_) ),
  466. size_( 0 )
  467. {
  468. BOOST_ASSERT( is_valid() );
  469. }
  470. auto_buffer( size_type size_arg, optimized_const_reference init_value )
  471. : members_( (std::max)(size_arg, size_type(N)) ),
  472. buffer_( allocate(members_.capacity_) ),
  473. size_( 0 )
  474. {
  475. std::uninitialized_fill( buffer_, buffer_ + size_arg, init_value );
  476. size_ = size_arg;
  477. BOOST_ASSERT( is_valid() );
  478. }
  479. auto_buffer( size_type capacity_arg, const allocator_type& a )
  480. : allocator_type( a ),
  481. members_( (std::max)(capacity_arg, size_type(N)) ),
  482. buffer_( allocate(members_.capacity_) ),
  483. size_( 0 )
  484. {
  485. BOOST_ASSERT( is_valid() );
  486. }
  487. auto_buffer( size_type size_arg, optimized_const_reference init_value,
  488. const allocator_type& a )
  489. : allocator_type( a ),
  490. members_( (std::max)(size_arg, size_type(N)) ),
  491. buffer_( allocate(members_.capacity_) ),
  492. size_( 0 )
  493. {
  494. std::uninitialized_fill( buffer_, buffer_ + size_arg, init_value );
  495. size_ = size_arg;
  496. BOOST_ASSERT( is_valid() );
  497. }
  498. template< class ForwardIterator >
  499. auto_buffer( ForwardIterator begin_arg, ForwardIterator end_arg )
  500. :
  501. members_( std::distance(begin_arg, end_arg) ),
  502. buffer_( allocate(members_.capacity_) ),
  503. size_( 0 )
  504. {
  505. copy_impl( begin_arg, end_arg, buffer_ );
  506. size_ = members_.capacity_;
  507. if( members_.capacity_ < N )
  508. members_.capacity_ = N;
  509. BOOST_ASSERT( is_valid() );
  510. }
  511. template< class ForwardIterator >
  512. auto_buffer( ForwardIterator begin_arg, ForwardIterator end_arg,
  513. const allocator_type& a )
  514. : allocator_type( a ),
  515. members_( std::distance(begin_arg, end_arg) ),
  516. buffer_( allocate(members_.capacity_) ),
  517. size_( 0 )
  518. {
  519. copy_impl( begin_arg, end_arg, buffer_ );
  520. size_ = members_.capacity_;
  521. if( members_.capacity_ < N )
  522. members_.capacity_ = N;
  523. BOOST_ASSERT( is_valid() );
  524. }
  525. ~auto_buffer()
  526. {
  527. auto_buffer_destroy();
  528. }
  529. public:
  530. bool empty() const
  531. {
  532. return size_ == 0;
  533. }
  534. bool full() const
  535. {
  536. return size_ == members_.capacity_;
  537. }
  538. bool is_on_stack() const
  539. {
  540. return members_.capacity_ <= N;
  541. }
  542. size_type size() const
  543. {
  544. return size_;
  545. }
  546. size_type capacity() const
  547. {
  548. return members_.capacity_;
  549. }
  550. public:
  551. pointer data()
  552. {
  553. return buffer_;
  554. }
  555. const_pointer data() const
  556. {
  557. return buffer_;
  558. }
  559. allocator_type& get_allocator()
  560. {
  561. return static_cast<allocator_type&>(*this);
  562. }
  563. const allocator_type& get_allocator() const
  564. {
  565. return static_cast<const allocator_type&>(*this);
  566. }
  567. public:
  568. iterator begin()
  569. {
  570. return buffer_;
  571. }
  572. const_iterator begin() const
  573. {
  574. return buffer_;
  575. }
  576. iterator end()
  577. {
  578. return buffer_ + size_;
  579. }
  580. const_iterator end() const
  581. {
  582. return buffer_ + size_;
  583. }
  584. reverse_iterator rbegin()
  585. {
  586. return reverse_iterator(end());
  587. }
  588. const_reverse_iterator rbegin() const
  589. {
  590. return const_reverse_iterator(end());
  591. }
  592. reverse_iterator rend()
  593. {
  594. return reverse_iterator(begin());
  595. }
  596. const_reverse_iterator rend() const
  597. {
  598. return const_reverse_iterator(begin());
  599. }
  600. const_iterator cbegin() const
  601. {
  602. return const_cast<const auto_buffer*>(this)->begin();
  603. }
  604. const_iterator cend() const
  605. {
  606. return const_cast<const auto_buffer*>(this)->end();
  607. }
  608. const_reverse_iterator crbegin() const
  609. {
  610. return const_cast<const auto_buffer*>(this)->rbegin();
  611. }
  612. const_reverse_iterator crend() const
  613. {
  614. return const_cast<const auto_buffer*>(this)->rend();
  615. }
  616. public:
  617. reference front()
  618. {
  619. return buffer_[0];
  620. }
  621. optimized_const_reference front() const
  622. {
  623. return buffer_[0];
  624. }
  625. reference back()
  626. {
  627. return buffer_[size_-1];
  628. }
  629. optimized_const_reference back() const
  630. {
  631. return buffer_[size_-1];
  632. }
  633. reference operator[]( size_type n )
  634. {
  635. BOOST_ASSERT( n < size_ );
  636. return buffer_[n];
  637. }
  638. optimized_const_reference operator[]( size_type n ) const
  639. {
  640. BOOST_ASSERT( n < size_ );
  641. return buffer_[n];
  642. }
  643. void unchecked_push_back()
  644. {
  645. BOOST_ASSERT( !full() );
  646. new (buffer_ + size_) T;
  647. ++size_;
  648. }
  649. void unchecked_push_back_n( size_type n )
  650. {
  651. BOOST_ASSERT( size_ + n <= members_.capacity_ );
  652. unchecked_push_back_n( n, boost::has_trivial_assign<T>() );
  653. }
  654. void unchecked_push_back( optimized_const_reference x ) // non-growing
  655. {
  656. BOOST_ASSERT( !full() );
  657. new (buffer_ + size_) T( x );
  658. ++size_;
  659. }
  660. template< class ForwardIterator >
  661. void unchecked_push_back( ForwardIterator begin_arg,
  662. ForwardIterator end_arg ) // non-growing
  663. {
  664. BOOST_ASSERT( size_ + std::distance(begin_arg, end_arg) <= members_.capacity_ );
  665. copy_impl( begin_arg, end_arg, buffer_ + size_ );
  666. size_ += std::distance(begin_arg, end_arg);
  667. }
  668. void reserve_precisely( size_type n )
  669. {
  670. BOOST_ASSERT( members_.capacity_ >= N );
  671. if( n <= members_.capacity_ )
  672. return;
  673. reserve_impl( n );
  674. BOOST_ASSERT( members_.capacity_ == n );
  675. }
  676. void reserve( size_type n ) // strong
  677. {
  678. BOOST_ASSERT( members_.capacity_ >= N );
  679. if( n <= members_.capacity_ )
  680. return;
  681. reserve_impl( new_capacity_impl( n ) );
  682. BOOST_ASSERT( members_.capacity_ >= n );
  683. }
  684. void push_back()
  685. {
  686. if( size_ != members_.capacity_ )
  687. {
  688. unchecked_push_back();
  689. }
  690. else
  691. {
  692. reserve( size_ + 1u );
  693. unchecked_push_back();
  694. }
  695. }
  696. void push_back( optimized_const_reference x )
  697. {
  698. if( size_ != members_.capacity_ )
  699. {
  700. unchecked_push_back( x );
  701. }
  702. else
  703. {
  704. reserve( size_ + 1u );
  705. unchecked_push_back( x );
  706. }
  707. }
  708. template< class ForwardIterator >
  709. void push_back( ForwardIterator begin_arg, ForwardIterator end_arg )
  710. {
  711. difference_type diff = std::distance(begin_arg, end_arg);
  712. if( size_ + diff > members_.capacity_ )
  713. reserve( size_ + diff );
  714. unchecked_push_back( begin_arg, end_arg );
  715. }
  716. iterator insert( const_iterator before, optimized_const_reference x ) // basic
  717. {
  718. // @todo: consider if we want to support x in 'this'
  719. if( size_ < members_.capacity_ )
  720. {
  721. bool is_back_insertion = before == cend();
  722. iterator where = const_cast<T*>(before);
  723. if( !is_back_insertion )
  724. {
  725. grow_back_one();
  726. std::copy( before, cend() - 1u, where + 1u );
  727. *where = x;
  728. BOOST_ASSERT( is_valid() );
  729. }
  730. else
  731. {
  732. unchecked_push_back( x );
  733. }
  734. return where;
  735. }
  736. auto_buffer temp( new_capacity_impl( size_ + 1u ) );
  737. temp.unchecked_push_back( cbegin(), before );
  738. iterator result = temp.end();
  739. temp.unchecked_push_back( x );
  740. temp.unchecked_push_back( before, cend() );
  741. one_sided_swap( temp );
  742. BOOST_ASSERT( is_valid() );
  743. return result;
  744. }
  745. void insert( const_iterator before, size_type n,
  746. optimized_const_reference x )
  747. {
  748. // @todo: see problems above
  749. if( size_ + n <= members_.capacity_ )
  750. {
  751. grow_back( n );
  752. iterator where = const_cast<T*>(before);
  753. std::copy( before, cend() - n, where + n );
  754. std::fill( where, where + n, x );
  755. BOOST_ASSERT( is_valid() );
  756. return;
  757. }
  758. auto_buffer temp( new_capacity_impl( size_ + n ) );
  759. temp.unchecked_push_back( cbegin(), before );
  760. std::uninitialized_fill_n( temp.end(), n, x );
  761. temp.size_ += n;
  762. temp.unchecked_push_back( before, cend() );
  763. one_sided_swap( temp );
  764. BOOST_ASSERT( is_valid() );
  765. }
  766. template< class ForwardIterator >
  767. void insert( const_iterator before,
  768. ForwardIterator begin_arg, ForwardIterator end_arg ) // basic
  769. {
  770. typedef typename std::iterator_traits<ForwardIterator>
  771. ::iterator_category category;
  772. insert_impl( before, begin_arg, end_arg, category() );
  773. }
  774. void pop_back()
  775. {
  776. BOOST_ASSERT( !empty() );
  777. auto_buffer_destroy( buffer_ + size_ - 1, boost::has_trivial_destructor<T>() );
  778. --size_;
  779. }
  780. void pop_back_n( size_type n )
  781. {
  782. BOOST_ASSERT( n <= size_ );
  783. if( n )
  784. {
  785. destroy_back_n( n );
  786. size_ -= n;
  787. }
  788. }
  789. void clear()
  790. {
  791. pop_back_n( size_ );
  792. }
  793. iterator erase( const_iterator where )
  794. {
  795. BOOST_ASSERT( !empty() );
  796. BOOST_ASSERT( cbegin() <= where );
  797. BOOST_ASSERT( cend() > where );
  798. unsigned elements = cend() - where - 1u;
  799. if( elements > 0u )
  800. {
  801. const_iterator start = where + 1u;
  802. std::copy( start, start + elements,
  803. const_cast<T*>(where) );
  804. }
  805. pop_back();
  806. BOOST_ASSERT( !full() );
  807. iterator result = const_cast<T*>( where );
  808. BOOST_ASSERT( result <= end() );
  809. return result;
  810. }
  811. iterator erase( const_iterator from, const_iterator to )
  812. {
  813. BOOST_ASSERT( !(std::distance(from,to)>0) ||
  814. !empty() );
  815. BOOST_ASSERT( cbegin() <= from );
  816. BOOST_ASSERT( cend() >= to );
  817. unsigned elements = std::distance(to,cend());
  818. if( elements > 0u )
  819. {
  820. BOOST_ASSERT( elements > 0u );
  821. std::copy( to, to + elements,
  822. const_cast<T*>(from) );
  823. }
  824. pop_back_n( std::distance(from,to) );
  825. BOOST_ASSERT( !full() );
  826. iterator result = const_cast<T*>( from );
  827. BOOST_ASSERT( result <= end() );
  828. return result;
  829. }
  830. void shrink_to_fit()
  831. {
  832. if( is_on_stack() || !GrowPolicy::should_shrink(size_,members_.capacity_) )
  833. return;
  834. reserve_impl( size_ );
  835. members_.capacity_ = (std::max)(size_type(N),members_.capacity_);
  836. BOOST_ASSERT( is_on_stack() || size_ == members_.capacity_ );
  837. BOOST_ASSERT( !is_on_stack() || size_ <= members_.capacity_ );
  838. }
  839. pointer uninitialized_grow( size_type n ) // strong
  840. {
  841. if( size_ + n > members_.capacity_ )
  842. reserve( size_ + n );
  843. pointer res = end();
  844. size_ += n;
  845. return res;
  846. }
  847. void uninitialized_shrink( size_type n ) // nothrow
  848. {
  849. // @remark: test for wrap-around
  850. BOOST_ASSERT( size_ - n <= members_.capacity_ );
  851. size_ -= n;
  852. }
  853. void uninitialized_resize( size_type n )
  854. {
  855. if( n > size() )
  856. uninitialized_grow( n - size() );
  857. else if( n < size() )
  858. uninitialized_shrink( size() - n );
  859. BOOST_ASSERT( size() == n );
  860. }
  861. // nothrow - if both buffer are on the heap, or
  862. // - if one buffer is on the heap and one has
  863. // 'has_allocated_buffer() == false', or
  864. // - if copy-construction cannot throw
  865. // basic - otherwise (better guarantee impossible)
  866. // requirement: the allocator must be no-throw-swappable
  867. void swap( auto_buffer& r )
  868. {
  869. bool on_stack = is_on_stack();
  870. bool r_on_stack = r.is_on_stack();
  871. bool both_on_heap = !on_stack && !r_on_stack;
  872. if( both_on_heap )
  873. {
  874. boost::swap( get_allocator(), r.get_allocator() );
  875. boost::swap( members_.capacity_, r.members_.capacity_ );
  876. boost::swap( buffer_, r.buffer_ );
  877. boost::swap( size_, r.size_ );
  878. BOOST_ASSERT( is_valid() );
  879. BOOST_ASSERT( r.is_valid() );
  880. return;
  881. }
  882. BOOST_ASSERT( on_stack || r_on_stack );
  883. bool exactly_one_on_stack = (on_stack && !r_on_stack) ||
  884. (!on_stack && r_on_stack);
  885. //
  886. // Remark: we now know that we can copy into
  887. // the unused stack buffer.
  888. //
  889. if( exactly_one_on_stack )
  890. {
  891. auto_buffer* one_on_stack = on_stack ? this : &r;
  892. auto_buffer* other = on_stack ? &r : this;
  893. pointer new_buffer = static_cast<T*>(other->members_.address());
  894. copy_impl( one_on_stack->begin(), one_on_stack->end(),
  895. new_buffer ); // strong
  896. one_on_stack->auto_buffer_destroy(); // nothrow
  897. boost::swap( get_allocator(), r.get_allocator() ); // assume nothrow
  898. boost::swap( members_.capacity_, r.members_.capacity_ );
  899. boost::swap( size_, r.size_ );
  900. one_on_stack->buffer_ = other->buffer_;
  901. other->buffer_ = new_buffer;
  902. BOOST_ASSERT( other->is_on_stack() );
  903. BOOST_ASSERT( !one_on_stack->is_on_stack() );
  904. BOOST_ASSERT( is_valid() );
  905. BOOST_ASSERT( r.is_valid() );
  906. return;
  907. }
  908. BOOST_ASSERT( on_stack && r_on_stack );
  909. swap_helper( *this, r, boost::has_trivial_assign<T>() );
  910. BOOST_ASSERT( is_valid() );
  911. BOOST_ASSERT( r.is_valid() );
  912. }
  913. private:
  914. typedef boost::aligned_storage< N * sizeof(T),
  915. boost::alignment_of<T>::value >
  916. storage;
  917. struct members_type : storage /* to enable EBO */
  918. {
  919. size_type capacity_;
  920. members_type( size_type capacity )
  921. : capacity_(capacity)
  922. { }
  923. void* address() const
  924. { return const_cast<storage&>(static_cast<const storage&>(*this)).address(); }
  925. };
  926. members_type members_;
  927. pointer buffer_;
  928. size_type size_;
  929. };
  930. template< class T, class SBP, class GP, class A >
  931. inline void swap( auto_buffer<T,SBP,GP,A>& l, auto_buffer<T,SBP,GP,A>& r )
  932. {
  933. l.swap( r );
  934. }
  935. template< class T, class SBP, class GP, class A >
  936. inline bool operator==( const auto_buffer<T,SBP,GP,A>& l,
  937. const auto_buffer<T,SBP,GP,A>& r )
  938. {
  939. if( l.size() != r.size() )
  940. return false;
  941. return std::equal( l.begin(), l.end(), r.begin() );
  942. }
  943. template< class T, class SBP, class GP, class A >
  944. inline bool operator!=( const auto_buffer<T,SBP,GP,A>& l,
  945. const auto_buffer<T,SBP,GP,A>& r )
  946. {
  947. return !(l == r);
  948. }
  949. template< class T, class SBP, class GP, class A >
  950. inline bool operator<( const auto_buffer<T,SBP,GP,A>& l,
  951. const auto_buffer<T,SBP,GP,A>& r )
  952. {
  953. return std::lexicographical_compare( l.begin(), l.end(),
  954. r.begin(), r.end() );
  955. }
  956. template< class T, class SBP, class GP, class A >
  957. inline bool operator>( const auto_buffer<T,SBP,GP,A>& l,
  958. const auto_buffer<T,SBP,GP,A>& r )
  959. {
  960. return (r < l);
  961. }
  962. template< class T, class SBP, class GP, class A >
  963. inline bool operator<=( const auto_buffer<T,SBP,GP,A>& l,
  964. const auto_buffer<T,SBP,GP,A>& r )
  965. {
  966. return !(l > r);
  967. }
  968. template< class T, class SBP, class GP, class A >
  969. inline bool operator>=( const auto_buffer<T,SBP,GP,A>& l,
  970. const auto_buffer<T,SBP,GP,A>& r )
  971. {
  972. return !(l < r);
  973. }
} // namespace detail
} // namespace signals2
} // namespace boost

#if BOOST_WORKAROUND(BOOST_MSVC, >= 1400)
#pragma warning(pop)
#endif

#endif