// boost/lockfree/detail/freelist.hpp
// lock-free freelist
//
// Copyright (C) 2008-2016 Tim Blechmann
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)

#ifndef BOOST_LOCKFREE_FREELIST_HPP_INCLUDED
#define BOOST_LOCKFREE_FREELIST_HPP_INCLUDED
#include <array>
#include <cstdint>
#include <cstring>
#include <limits>
#include <memory>
#include <stdexcept>
#include <type_traits>
#include <utility>

#include <boost/align/align_up.hpp>
#include <boost/align/aligned_allocator_adaptor.hpp>
#include <boost/config.hpp>
#include <boost/static_assert.hpp>
#include <boost/throw_exception.hpp>

#include <boost/lockfree/detail/atomic.hpp>
#include <boost/lockfree/detail/parameter.hpp>
#include <boost/lockfree/detail/tagged_ptr.hpp>
#if defined( _MSC_VER )
#    pragma warning( push )
#    pragma warning( disable : 4100 ) // unreferenced formal parameter
#    pragma warning( disable : 4127 ) // conditional expression is constant
#endif

namespace boost { namespace lockfree { namespace detail {

//----------------------------------------------------------------------------------------------------------------------
  31. template < typename T, typename Alloc = std::allocator< T > >
  32. class alignas( cacheline_bytes ) freelist_stack : Alloc
  33. {
  34. struct freelist_node
  35. {
  36. tagged_ptr< freelist_node > next;
  37. };
  38. typedef tagged_ptr< freelist_node > tagged_node_ptr;
  39. public:
  40. typedef T* index_t;
  41. typedef tagged_ptr< T > tagged_node_handle;
  42. template < typename Allocator >
  43. freelist_stack( Allocator const& alloc, std::size_t n = 0 ) :
  44. Alloc( alloc ),
  45. pool_( tagged_node_ptr( NULL ) )
  46. {
  47. for ( std::size_t i = 0; i != n; ++i ) {
  48. T* node = Alloc::allocate( 1 );
  49. std::memset( (void*)node, 0, sizeof( T ) );
  50. #ifdef BOOST_LOCKFREE_FREELIST_INIT_RUNS_DTOR
  51. destruct< false >( node );
  52. #else
  53. deallocate< false >( node );
  54. #endif
  55. }
  56. }
  57. template < bool ThreadSafe >
  58. void reserve( std::size_t count )
  59. {
  60. for ( std::size_t i = 0; i != count; ++i ) {
  61. T* node = Alloc::allocate( 1 );
  62. std::memset( (void*)node, 0, sizeof( T ) );
  63. deallocate< ThreadSafe >( node );
  64. }
  65. }
  66. template < bool ThreadSafe, bool Bounded >
  67. T* construct( void )
  68. {
  69. T* node = allocate< ThreadSafe, Bounded >();
  70. if ( node )
  71. new ( node ) T();
  72. return node;
  73. }
  74. template < bool ThreadSafe, bool Bounded, typename ArgumentType >
  75. T* construct( const ArgumentType& arg )
  76. {
  77. T* node = allocate< ThreadSafe, Bounded >();
  78. if ( node )
  79. new ( node ) T( arg );
  80. return node;
  81. }
  82. template < bool ThreadSafe, bool Bounded, typename ArgumentType >
  83. T* construct( ArgumentType&& arg )
  84. {
  85. T* node = allocate< ThreadSafe, Bounded >();
  86. if ( node )
  87. new ( node ) T( std::forward< ArgumentType >( arg ) );
  88. return node;
  89. }
  90. template < bool ThreadSafe, bool Bounded, typename ArgumentType1, typename ArgumentType2 >
  91. T* construct( ArgumentType1&& arg1, ArgumentType2&& arg2 )
  92. {
  93. T* node = allocate< ThreadSafe, Bounded >();
  94. if ( node )
  95. new ( node ) T( arg1, arg2 );
  96. return node;
  97. }
  98. template < bool ThreadSafe >
  99. void destruct( tagged_node_handle const& tagged_ptr )
  100. {
  101. T* n = tagged_ptr.get_ptr();
  102. n->~T();
  103. deallocate< ThreadSafe >( n );
  104. }
  105. template < bool ThreadSafe >
  106. void destruct( T* n )
  107. {
  108. n->~T();
  109. deallocate< ThreadSafe >( n );
  110. }
  111. ~freelist_stack( void )
  112. {
  113. tagged_node_ptr current = pool_.load();
  114. while ( current ) {
  115. freelist_node* current_ptr = current.get_ptr();
  116. if ( current_ptr )
  117. current = current_ptr->next;
  118. Alloc::deallocate( (T*)current_ptr, 1 );
  119. }
  120. }
  121. bool is_lock_free( void ) const
  122. {
  123. return pool_.is_lock_free();
  124. }
  125. T* get_handle( T* pointer ) const
  126. {
  127. return pointer;
  128. }
  129. T* get_handle( tagged_node_handle const& handle ) const
  130. {
  131. return get_pointer( handle );
  132. }
  133. T* get_pointer( tagged_node_handle const& tptr ) const
  134. {
  135. return tptr.get_ptr();
  136. }
  137. T* get_pointer( T* pointer ) const
  138. {
  139. return pointer;
  140. }
  141. T* null_handle( void ) const
  142. {
  143. return NULL;
  144. }
  145. protected: // allow use from subclasses
  146. template < bool ThreadSafe, bool Bounded >
  147. T* allocate( void )
  148. {
  149. if ( ThreadSafe )
  150. return allocate_impl< Bounded >();
  151. else
  152. return allocate_impl_unsafe< Bounded >();
  153. }
  154. private:
  155. template < bool Bounded >
  156. T* allocate_impl( void )
  157. {
  158. tagged_node_ptr old_pool = pool_.load( memory_order_consume );
  159. for ( ;; ) {
  160. if ( !old_pool.get_ptr() ) {
  161. if ( !Bounded ) {
  162. T* ptr = Alloc::allocate( 1 );
  163. std::memset( (void*)ptr, 0, sizeof( T ) );
  164. return ptr;
  165. } else
  166. return 0;
  167. }
  168. freelist_node* new_pool_ptr = old_pool->next.get_ptr();
  169. tagged_node_ptr new_pool( new_pool_ptr, old_pool.get_next_tag() );
  170. if ( pool_.compare_exchange_weak( old_pool, new_pool ) ) {
  171. void* ptr = old_pool.get_ptr();
  172. return reinterpret_cast< T* >( ptr );
  173. }
  174. }
  175. }
  176. template < bool Bounded >
  177. T* allocate_impl_unsafe( void )
  178. {
  179. tagged_node_ptr old_pool = pool_.load( memory_order_relaxed );
  180. if ( !old_pool.get_ptr() ) {
  181. if ( !Bounded ) {
  182. T* ptr = Alloc::allocate( 1 );
  183. std::memset( (void*)ptr, 0, sizeof( T ) );
  184. return ptr;
  185. } else
  186. return 0;
  187. }
  188. freelist_node* new_pool_ptr = old_pool->next.get_ptr();
  189. tagged_node_ptr new_pool( new_pool_ptr, old_pool.get_next_tag() );
  190. pool_.store( new_pool, memory_order_relaxed );
  191. void* ptr = old_pool.get_ptr();
  192. return reinterpret_cast< T* >( ptr );
  193. }
  194. protected:
  195. template < bool ThreadSafe >
  196. void deallocate( T* n )
  197. {
  198. if ( ThreadSafe )
  199. deallocate_impl( n );
  200. else
  201. deallocate_impl_unsafe( n );
  202. }
  203. private:
  204. void deallocate_impl( T* n )
  205. {
  206. void* node = n;
  207. tagged_node_ptr old_pool = pool_.load( memory_order_consume );
  208. freelist_node* new_pool_ptr = reinterpret_cast< freelist_node* >( node );
  209. for ( ;; ) {
  210. tagged_node_ptr new_pool( new_pool_ptr, old_pool.get_tag() );
  211. new_pool->next.set_ptr( old_pool.get_ptr() );
  212. if ( pool_.compare_exchange_weak( old_pool, new_pool ) )
  213. return;
  214. }
  215. }
  216. void deallocate_impl_unsafe( T* n )
  217. {
  218. void* node = n;
  219. tagged_node_ptr old_pool = pool_.load( memory_order_relaxed );
  220. freelist_node* new_pool_ptr = reinterpret_cast< freelist_node* >( node );
  221. tagged_node_ptr new_pool( new_pool_ptr, old_pool.get_tag() );
  222. new_pool->next.set_ptr( old_pool.get_ptr() );
  223. pool_.store( new_pool, memory_order_relaxed );
  224. }
  225. atomic< tagged_node_ptr > pool_;
  226. };
  227. class tagged_index
  228. {
  229. public:
  230. typedef std::uint16_t tag_t;
  231. typedef std::uint16_t index_t;
  232. /** uninitialized constructor */
  233. tagged_index( void ) noexcept //: index(0), tag(0)
  234. {}
  235. /** copy constructor */
  236. tagged_index( tagged_index const& rhs ) = default;
  237. explicit tagged_index( index_t i, tag_t t = 0 ) :
  238. index( i ),
  239. tag( t )
  240. {}
  241. /** index access */
  242. /* @{ */
  243. index_t get_index() const
  244. {
  245. return index;
  246. }
  247. void set_index( index_t i )
  248. {
  249. index = i;
  250. }
  251. /* @} */
  252. /** tag access */
  253. /* @{ */
  254. tag_t get_tag() const
  255. {
  256. return tag;
  257. }
  258. tag_t get_next_tag() const
  259. {
  260. tag_t next = ( get_tag() + 1u ) & ( std::numeric_limits< tag_t >::max )();
  261. return next;
  262. }
  263. void set_tag( tag_t t )
  264. {
  265. tag = t;
  266. }
  267. /* @} */
  268. bool operator==( tagged_index const& rhs ) const
  269. {
  270. return ( index == rhs.index ) && ( tag == rhs.tag );
  271. }
  272. bool operator!=( tagged_index const& rhs ) const
  273. {
  274. return !operator==( rhs );
  275. }
  276. protected:
  277. index_t index;
  278. tag_t tag;
  279. };
//----------------------------------------------------------------------------------------------------------------------
  281. template < typename T, std::size_t size >
  282. struct alignas( cacheline_bytes ) compiletime_sized_freelist_storage
  283. {
  284. // array-based freelists only support a 16bit address space.
  285. BOOST_STATIC_ASSERT( size < 65536 );
  286. std::array< char, size * sizeof( T ) + 64 > data;
  287. // unused ... only for API purposes
  288. template < typename Allocator >
  289. compiletime_sized_freelist_storage( Allocator const& /* alloc */, std::size_t /* count */ )
  290. {
  291. data.fill( 0 );
  292. }
  293. T* nodes( void ) const
  294. {
  295. char* data_pointer = const_cast< char* >( data.data() );
  296. return reinterpret_cast< T* >( boost::alignment::align_up( data_pointer, cacheline_bytes ) );
  297. }
  298. std::size_t node_count( void ) const
  299. {
  300. return size;
  301. }
  302. };
//----------------------------------------------------------------------------------------------------------------------
  304. template < typename T, typename Alloc = std::allocator< T > >
  305. struct runtime_sized_freelist_storage : boost::alignment::aligned_allocator_adaptor< Alloc, cacheline_bytes >
  306. {
  307. typedef boost::alignment::aligned_allocator_adaptor< Alloc, cacheline_bytes > allocator_type;
  308. T* nodes_;
  309. std::size_t node_count_;
  310. template < typename Allocator >
  311. runtime_sized_freelist_storage( Allocator const& alloc, std::size_t count ) :
  312. allocator_type( alloc ),
  313. node_count_( count )
  314. {
  315. if ( count > 65535 )
  316. boost::throw_exception( std::runtime_error( "boost.lockfree: freelist size is limited to a maximum of "
  317. "65535 objects" ) );
  318. nodes_ = allocator_type::allocate( count );
  319. std::memset( (void*)nodes_, 0, sizeof( T ) * count );
  320. }
  321. ~runtime_sized_freelist_storage( void )
  322. {
  323. allocator_type::deallocate( nodes_, node_count_ );
  324. }
  325. T* nodes( void ) const
  326. {
  327. return nodes_;
  328. }
  329. std::size_t node_count( void ) const
  330. {
  331. return node_count_;
  332. }
  333. };
//----------------------------------------------------------------------------------------------------------------------
/** Freelist over a fixed-capacity array of nodes, addressed by 16bit
 *  indices rather than pointers.
 *
 *  Free slots are threaded into an intrusive LIFO list: while a slot is
 *  free, its first bytes are reinterpreted as a tagged_index holding the
 *  index of the next free slot.  node_count() doubles as the null handle.
 *  The tag in pool_ is advanced on every pop to defend against ABA.
 *
 *  \tparam T           node type handed out by the freelist
 *  \tparam NodeStorage provider of the contiguous node array
 */
template < typename T, typename NodeStorage = runtime_sized_freelist_storage< T > >
class fixed_size_freelist : NodeStorage
{
    // view of an unallocated slot: its first bytes store the index of the
    // next free slot
    struct freelist_node
    {
        tagged_index next;
    };

    // Thread every slot of the storage into the free list.  Each slot's
    // embedded next-index is first set to null, then the slot is pushed
    // (deallocate) -- or, under BOOST_LOCKFREE_FREELIST_INIT_RUNS_DTOR,
    // destructed as if it held a live T.
    void initialize( void )
    {
        T* nodes = NodeStorage::nodes();
        for ( std::size_t i = 0; i != NodeStorage::node_count(); ++i ) {
            tagged_index* next_index = reinterpret_cast< tagged_index* >( nodes + i );
            next_index->set_index( null_handle() );

#ifdef BOOST_LOCKFREE_FREELIST_INIT_RUNS_DTOR
            destruct< false >( nodes + i );
#else
            deallocate< false >( static_cast< index_t >( i ) );
#endif
        }
    }

public:
    typedef tagged_index          tagged_node_handle;
    typedef tagged_index::index_t index_t;

    /** Construct the freelist on top of freshly created storage; the pool
     *  head starts at index `count`, i.e. the null handle, until
     *  initialize() pushes the slots. */
    template < typename Allocator >
    fixed_size_freelist( Allocator const& alloc, std::size_t count ) :
        NodeStorage( alloc, count ),
        pool_( tagged_index( static_cast< index_t >( count ), 0 ) )
    {
        initialize();
    }

    /** Construct on top of default-constructed storage (compile-time sized
     *  storage only). */
    fixed_size_freelist( void ) :
        pool_( tagged_index( NodeStorage::node_count(), 0 ) )
    {
        initialize();
    }

    /** Take a slot and default-construct a T in it.
     *  \return the new object, or NULL when the pool is exhausted.
     *  \note Bounded is ignored: an array-based freelist is always bounded. */
    template < bool ThreadSafe, bool Bounded >
    T* construct( void )
    {
        index_t node_index = allocate< ThreadSafe >();
        if ( node_index == null_handle() )
            return NULL;

        T* node = NodeStorage::nodes() + node_index;
        new ( node ) T();
        return node;
    }

    /** Take a slot and copy-construct a T from \a arg. */
    template < bool ThreadSafe, bool Bounded, typename ArgumentType >
    T* construct( const ArgumentType& arg )
    {
        index_t node_index = allocate< ThreadSafe >();
        if ( node_index == null_handle() )
            return NULL;

        T* node = NodeStorage::nodes() + node_index;
        new ( node ) T( arg );
        return node;
    }

    /** Take a slot and construct a T, perfectly forwarding \a arg. */
    template < bool ThreadSafe, bool Bounded, typename ArgumentType >
    T* construct( ArgumentType&& arg )
    {
        index_t node_index = allocate< ThreadSafe >();
        if ( node_index == null_handle() )
            return NULL;

        T* node = NodeStorage::nodes() + node_index;
        new ( node ) T( std::forward< ArgumentType >( arg ) );
        return node;
    }

    /** Take a slot and construct a T from two arguments.
     *  NOTE(review): unlike freelist_stack, this overload takes const& and
     *  cannot move its arguments -- looks like an upstream inconsistency;
     *  confirm before changing. */
    template < bool ThreadSafe, bool Bounded, typename ArgumentType1, typename ArgumentType2 >
    T* construct( const ArgumentType1& arg1, const ArgumentType2& arg2 )
    {
        index_t node_index = allocate< ThreadSafe >();
        if ( node_index == null_handle() )
            return NULL;

        T* node = NodeStorage::nodes() + node_index;
        new ( node ) T( arg1, arg2 );
        return node;
    }

    /** Run ~T on the slot named by \a tagged_index and push it back. */
    template < bool ThreadSafe >
    void destruct( tagged_node_handle tagged_index )
    {
        index_t index = tagged_index.get_index();
        T*      n     = NodeStorage::nodes() + index;
        (void)n; // silence msvc warning
        n->~T();
        deallocate< ThreadSafe >( index );
    }

    /** Run ~T on \a n and push its slot back (index recovered by pointer
     *  arithmetic against the storage base). */
    template < bool ThreadSafe >
    void destruct( T* n )
    {
        n->~T();
        deallocate< ThreadSafe >( static_cast< index_t >( n - NodeStorage::nodes() ) );
    }

    bool is_lock_free( void ) const
    {
        return pool_.is_lock_free();
    }

    /** One-past-the-last index doubles as the null handle. */
    index_t null_handle( void ) const
    {
        return static_cast< index_t >( NodeStorage::node_count() );
    }

    // handle <-> pointer conversions; a handle is the slot index
    index_t get_handle( T* pointer ) const
    {
        if ( pointer == NULL )
            return null_handle();
        else
            return static_cast< index_t >( pointer - NodeStorage::nodes() );
    }

    index_t get_handle( tagged_node_handle const& handle ) const
    {
        return handle.get_index();
    }

    T* get_pointer( tagged_node_handle const& tptr ) const
    {
        return get_pointer( tptr.get_index() );
    }

    T* get_pointer( index_t index ) const
    {
        if ( index == null_handle() )
            return 0;
        else
            return NodeStorage::nodes() + index;
    }

    T* get_pointer( T* ptr ) const
    {
        return ptr;
    }

protected: // allow use from subclasses
    /** Pop a free slot's index; returns null_handle() when exhausted. */
    template < bool ThreadSafe >
    index_t allocate( void )
    {
        if ( ThreadSafe )
            return allocate_impl();
        else
            return allocate_impl_unsafe();
    }

private:
    index_t allocate_impl( void )
    {
        tagged_index old_pool = pool_.load( memory_order_consume );

        for ( ;; ) {
            index_t index = old_pool.get_index();
            if ( index == null_handle() )
                return index;

            // read the next-free index embedded in the popped slot; the tag
            // is advanced so a concurrent pop/push cycle (ABA) fails the CAS
            T*            old_node   = NodeStorage::nodes() + index;
            tagged_index* next_index = reinterpret_cast< tagged_index* >( old_node );
            tagged_index  new_pool( next_index->get_index(), old_pool.get_next_tag() );

            if ( pool_.compare_exchange_weak( old_pool, new_pool ) )
                return old_pool.get_index();
        }
    }

    // single-threaded variant: plain load/store, no CAS loop
    index_t allocate_impl_unsafe( void )
    {
        // NOTE(review): consume ordering here (and in the unsafe deallocate
        // below) looks stronger than needed for a single-threaded path --
        // freelist_stack uses relaxed; confirm before changing
        tagged_index old_pool = pool_.load( memory_order_consume );

        index_t index = old_pool.get_index();
        if ( index == null_handle() )
            return index;

        T*            old_node   = NodeStorage::nodes() + index;
        tagged_index* next_index = reinterpret_cast< tagged_index* >( old_node );
        tagged_index  new_pool( next_index->get_index(), old_pool.get_next_tag() );

        pool_.store( new_pool, memory_order_relaxed );
        return old_pool.get_index();
    }

    /** Push slot \a index back onto the free list (does NOT run ~T). */
    template < bool ThreadSafe >
    void deallocate( index_t index )
    {
        if ( ThreadSafe )
            deallocate_impl( index );
        else
            deallocate_impl_unsafe( index );
    }

    void deallocate_impl( index_t index )
    {
        freelist_node* new_pool_node = reinterpret_cast< freelist_node* >( NodeStorage::nodes() + index );
        tagged_index   old_pool      = pool_.load( memory_order_consume );

        for ( ;; ) {
            // pushing keeps the old tag; only pops advance it
            tagged_index new_pool( index, old_pool.get_tag() );
            new_pool_node->next.set_index( old_pool.get_index() );

            if ( pool_.compare_exchange_weak( old_pool, new_pool ) )
                return;
        }
    }

    void deallocate_impl_unsafe( index_t index )
    {
        freelist_node* new_pool_node = reinterpret_cast< freelist_node* >( NodeStorage::nodes() + index );
        tagged_index   old_pool      = pool_.load( memory_order_consume );

        tagged_index new_pool( index, old_pool.get_tag() );
        new_pool_node->next.set_index( old_pool.get_index() );

        // NOTE(review): defaults to seq_cst; the other unsafe stores use
        // memory_order_relaxed -- looks unintentionally strong; confirm
        pool_.store( new_pool );
    }

    atomic< tagged_index > pool_;
};
//----------------------------------------------------------------------------------------------------------------------
  525. template < typename T, typename Alloc, bool IsCompileTimeSized, bool IsFixedSize, std::size_t Capacity >
  526. struct select_freelist
  527. {
  528. typedef std::conditional_t< IsCompileTimeSized,
  529. compiletime_sized_freelist_storage< T, Capacity >,
  530. runtime_sized_freelist_storage< T, Alloc > >
  531. fixed_sized_storage_type;
  532. typedef std::conditional_t< IsCompileTimeSized || IsFixedSize,
  533. fixed_size_freelist< T, fixed_sized_storage_type >,
  534. freelist_stack< T, Alloc > >
  535. type;
  536. };
  537. template < typename T, typename Alloc, bool IsCompileTimeSized, bool IsFixedSize, std::size_t Capacity >
  538. using select_freelist_t = typename select_freelist< T, Alloc, IsCompileTimeSized, IsFixedSize, Capacity >::type;
//----------------------------------------------------------------------------------------------------------------------
  540. template < typename T, bool IsNodeBased >
  541. struct select_tagged_handle
  542. {
  543. typedef std::conditional_t< IsNodeBased, tagged_ptr< T >, tagged_index > tagged_handle_type;
  544. typedef std::conditional_t< IsNodeBased, T*, typename tagged_index::index_t > handle_type;
  545. };
//----------------------------------------------------------------------------------------------------------------------

}}} // namespace boost::lockfree::detail

#if defined( _MSC_VER )
#    pragma warning( pop )
#endif

#endif /* BOOST_LOCKFREE_FREELIST_HPP_INCLUDED */