// adaptive_node_pool_impl.hpp

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261
  1. //////////////////////////////////////////////////////////////////////////////
  2. //
  3. // (C) Copyright Ion Gaztanaga 2005-2013. Distributed under the Boost
  4. // Software License, Version 1.0. (See accompanying file
  5. // LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
  6. //
  7. // See http://www.boost.org/libs/container for documentation.
  8. //
  9. //////////////////////////////////////////////////////////////////////////////
#ifndef BOOST_CONTAINER_DETAIL_ADAPTIVE_NODE_POOL_IMPL_HPP
#define BOOST_CONTAINER_DETAIL_ADAPTIVE_NODE_POOL_IMPL_HPP

#ifndef BOOST_CONFIG_HPP
#  include <boost/config.hpp>
#endif

#if defined(BOOST_HAS_PRAGMA_ONCE)
#  pragma once
#endif

#include <boost/container/detail/config_begin.hpp>
#include <boost/container/detail/workaround.hpp>

// container
#include <boost/container/container_fwd.hpp>
#include <boost/container/throw_exception.hpp>

// container/detail
#include <boost/container/detail/pool_common.hpp>
#include <boost/container/detail/iterator.hpp>
#include <boost/move/detail/iterator_to_raw_pointer.hpp>
#include <boost/container/detail/math_functions.hpp>
#include <boost/container/detail/placement_new.hpp>
#include <boost/container/detail/mpl.hpp>
#include <boost/move/detail/to_raw_pointer.hpp>
#include <boost/move/detail/force_ptr.hpp>
#include <boost/container/detail/type_traits.hpp>

// intrusive
#include <boost/intrusive/pointer_traits.hpp>
#include <boost/intrusive/set.hpp>
#include <boost/intrusive/list.hpp>
#include <boost/intrusive/slist.hpp>

// other
#include <boost/assert.hpp>
#include <cstddef>
  41. namespace boost {
  42. namespace container {
  43. namespace adaptive_pool_flag {
  44. BOOST_CONTAINER_CONSTANT_VAR unsigned int none = 0u;
  45. BOOST_CONTAINER_CONSTANT_VAR unsigned int align_only = 1u << 0u;
  46. BOOST_CONTAINER_CONSTANT_VAR unsigned int size_ordered = 1u << 1u;
  47. BOOST_CONTAINER_CONSTANT_VAR unsigned int address_ordered = 1u << 2u;
  48. } //namespace adaptive_pool_flag{
  49. namespace dtl {
  50. template<class size_type>
  51. struct hdr_offset_holder_t
  52. {
  53. hdr_offset_holder_t(size_type offset = 0)
  54. : hdr_offset(offset)
  55. {}
  56. size_type hdr_offset;
  57. };
  58. template<class SizeType, unsigned int Flags>
  59. struct less_func;
  60. template<class SizeType>
  61. struct less_func<SizeType, adaptive_pool_flag::none>
  62. {
  63. static bool less(SizeType, SizeType, const void *, const void *)
  64. { return true; }
  65. };
  66. template<class SizeType>
  67. struct less_func<SizeType, adaptive_pool_flag::size_ordered>
  68. {
  69. static bool less(SizeType ls, SizeType rs, const void *, const void *)
  70. { return ls < rs; }
  71. };
  72. template<class SizeType>
  73. struct less_func<SizeType, adaptive_pool_flag::address_ordered>
  74. {
  75. static bool less(SizeType, SizeType, const void *la, const void *ra)
  76. { return la < ra; }
  77. };
  78. template<class SizeType>
  79. struct less_func<SizeType, adaptive_pool_flag::size_ordered | adaptive_pool_flag::address_ordered>
  80. {
  81. static bool less(SizeType ls, SizeType rs, const void *la, const void *ra)
  82. { return (ls < rs) || ((ls == rs) && (la < ra)); }
  83. };
  84. template<class VoidPointer, class SizeType, unsigned OrderFlags>
  85. struct block_container_traits
  86. {
  87. typedef typename bi::make_set_base_hook
  88. < bi::void_pointer<VoidPointer>
  89. , bi::optimize_size<true>
  90. , bi::link_mode<bi::normal_link> >::type hook_t;
  91. template<class T>
  92. struct container
  93. {
  94. typedef typename bi::make_multiset
  95. <T, bi::base_hook<hook_t>, bi::size_type<SizeType> >::type type;
  96. };
  97. template<class Container>
  98. static void reinsert_was_used(Container &container, typename Container::reference v, bool)
  99. {
  100. typedef typename Container::const_iterator const_block_iterator;
  101. typedef typename Container::iterator block_iterator;
  102. typedef typename Container::value_compare value_compare;
  103. const block_iterator this_block(Container::s_iterator_to(v));
  104. const const_block_iterator cendit(container.cend());
  105. block_iterator next_block(this_block);
  106. if(++next_block != cendit && value_compare()(*next_block, v)){
  107. const_block_iterator next2_block(next_block);
  108. //Test if v should be swapped with next (optimization)
  109. if(++next2_block == cendit || !value_compare()(*next2_block, v)){
  110. v.swap_nodes(*next_block);
  111. BOOST_ASSERT(++next_block == this_block);
  112. }
  113. else{
  114. container.erase(this_block);
  115. container.insert(v);
  116. }
  117. }
  118. }
  119. template<class Container>
  120. static void insert_was_empty(Container &container, typename Container::value_type &v, bool)
  121. {
  122. container.insert(v);
  123. }
  124. template<class Container>
  125. static void erase_first(Container &container)
  126. {
  127. container.erase(container.cbegin());
  128. }
  129. template<class Container>
  130. static void erase_last(Container &container)
  131. {
  132. container.erase(--container.cend());
  133. }
  134. };
  135. template<class VoidPointer, class SizeType>
  136. struct block_container_traits<VoidPointer, SizeType, 0u>
  137. {
  138. typedef typename bi::make_list_base_hook
  139. < bi::void_pointer<VoidPointer>
  140. , bi::link_mode<bi::normal_link> >::type hook_t;
  141. template<class T>
  142. struct container
  143. {
  144. typedef typename bi::make_list
  145. <T, bi::base_hook<hook_t>, bi::size_type<SizeType>, bi::constant_time_size<false> >::type type;
  146. };
  147. template<class Container>
  148. static void reinsert_was_used(Container &container, typename Container::value_type &v, bool is_full)
  149. {
  150. if(is_full){
  151. container.erase(Container::s_iterator_to(v));
  152. container.push_back(v);
  153. }
  154. }
  155. template<class Container>
  156. static void insert_was_empty(Container &container, typename Container::value_type &v, bool is_full)
  157. {
  158. if(is_full){
  159. container.push_back(v);
  160. }
  161. else{
  162. container.push_front(v);
  163. }
  164. }
  165. template<class Container>
  166. static void erase_first(Container &container)
  167. {
  168. container.pop_front();
  169. }
  170. template<class Container>
  171. static void erase_last(Container &container)
  172. {
  173. container.pop_back();
  174. }
  175. };
  176. /////////////////////////////
  177. //
  178. // adaptive_pool_types
  179. //
  180. /////////////////////////////
  181. template<class MultiallocationChain, class VoidPointer, class SizeType, unsigned int Flags>
  182. struct adaptive_pool_types
  183. {
  184. typedef VoidPointer void_pointer;
  185. BOOST_STATIC_CONSTEXPR unsigned ordered = (Flags & (adaptive_pool_flag::size_ordered | adaptive_pool_flag::address_ordered));
  186. typedef block_container_traits<VoidPointer, SizeType, ordered> block_container_traits_t;
  187. typedef typename block_container_traits_t::hook_t hook_t;
  188. typedef hdr_offset_holder_t<SizeType> hdr_offset_holder;
  189. BOOST_STATIC_CONSTEXPR unsigned int order_flags = Flags & (adaptive_pool_flag::size_ordered | adaptive_pool_flag::address_ordered);
  190. typedef MultiallocationChain free_nodes_t;
  191. struct block_info_t
  192. : public hdr_offset_holder,
  193. public hook_t
  194. {
  195. //An intrusive list of free node from this block
  196. free_nodes_t free_nodes;
  197. friend bool operator <(const block_info_t &l, const block_info_t &r)
  198. {
  199. return less_func<SizeType, order_flags>::
  200. less(l.free_nodes.size(), r.free_nodes.size(), &l , &r);
  201. }
  202. friend bool operator ==(const block_info_t &l, const block_info_t &r)
  203. { return &l == &r; }
  204. };
  205. typedef typename block_container_traits_t:: template container<block_info_t>::type block_container_t;
  206. };
  207. /////////////////////////////////////////////
  208. //
  209. // candidate_power_of_2_ct
  210. //
  211. /////////////////////////////////////////////
  212. template< std::size_t alignment
  213. , std::size_t real_node_size
  214. , std::size_t payload_per_allocation
  215. , std::size_t min_elements_per_block
  216. , std::size_t hdr_size
  217. , std::size_t hdr_offset_size
  218. , std::size_t overhead_percent>
  219. struct candidate_power_of_2_ct_helper
  220. {
  221. BOOST_STATIC_CONSTEXPR std::size_t hdr_subblock_elements_alone = (alignment - hdr_size - payload_per_allocation)/real_node_size;
  222. BOOST_STATIC_CONSTEXPR std::size_t hdr_subblock_elements_first = (alignment - hdr_size - payload_per_allocation)/real_node_size;
  223. BOOST_STATIC_CONSTEXPR std::size_t elements_per_b_subblock_mid = (alignment - hdr_offset_size)/real_node_size;
  224. BOOST_STATIC_CONSTEXPR std::size_t elements_per_b_subblock_end = (alignment - hdr_offset_size - payload_per_allocation)/real_node_size;
  225. BOOST_STATIC_CONSTEXPR std::size_t num_b_subblock =
  226. hdr_subblock_elements_alone >= min_elements_per_block
  227. ? 0
  228. : ( ((hdr_subblock_elements_first + elements_per_b_subblock_end) >= min_elements_per_block)
  229. ? 1
  230. : 2 + (min_elements_per_block - hdr_subblock_elements_first - elements_per_b_subblock_end - 1)/elements_per_b_subblock_mid
  231. )
  232. ;
  233. BOOST_STATIC_CONSTEXPR std::size_t num_b_subblock_mid = (num_b_subblock > 1) ? (num_b_subblock - 1) : 0;
  234. BOOST_STATIC_CONSTEXPR std::size_t total_nodes = (num_b_subblock == 0)
  235. ? hdr_subblock_elements_alone
  236. : ( (num_b_subblock == 1)
  237. ? (hdr_subblock_elements_first + elements_per_b_subblock_end)
  238. : (hdr_subblock_elements_first + num_b_subblock_mid*elements_per_b_subblock_mid + elements_per_b_subblock_end)
  239. )
  240. ;
  241. BOOST_STATIC_CONSTEXPR std::size_t total_data = total_nodes*real_node_size;
  242. BOOST_STATIC_CONSTEXPR std::size_t total_size = alignment*(num_b_subblock+1);
  243. BOOST_STATIC_CONSTEXPR bool overhead_satisfied = (total_size - total_data)*100/total_size < overhead_percent;
  244. };
  245. template< std::size_t initial_alignment
  246. , std::size_t real_node_size
  247. , std::size_t payload_per_allocation
  248. , std::size_t min_elements_per_block
  249. , std::size_t hdr_size
  250. , std::size_t hdr_offset_size
  251. , std::size_t overhead_percent
  252. , bool Loop = true>
  253. struct candidate_power_of_2_ct
  254. {
  255. typedef candidate_power_of_2_ct_helper
  256. < initial_alignment
  257. , real_node_size
  258. , payload_per_allocation
  259. , min_elements_per_block
  260. , hdr_size
  261. , hdr_offset_size
  262. , overhead_percent> helper_t;
  263. BOOST_STATIC_CONSTEXPR std::size_t candidate_power_of_2 = initial_alignment << std::size_t(!helper_t::overhead_satisfied);
  264. typedef typename candidate_power_of_2_ct
  265. < candidate_power_of_2
  266. , real_node_size
  267. , payload_per_allocation
  268. , min_elements_per_block
  269. , hdr_size
  270. , hdr_offset_size
  271. , overhead_percent
  272. , !helper_t::overhead_satisfied
  273. >::type type;
  274. BOOST_STATIC_CONSTEXPR std::size_t alignment = type::alignment;
  275. BOOST_STATIC_CONSTEXPR std::size_t num_subblocks = type::num_subblocks;
  276. BOOST_STATIC_CONSTEXPR std::size_t real_num_node = type::real_num_node;
  277. };
  278. template< std::size_t initial_alignment
  279. , std::size_t real_node_size
  280. , std::size_t payload_per_allocation
  281. , std::size_t min_elements_per_block
  282. , std::size_t hdr_size
  283. , std::size_t hdr_offset_size
  284. , std::size_t overhead_percent
  285. >
  286. struct candidate_power_of_2_ct
  287. < initial_alignment
  288. , real_node_size
  289. , payload_per_allocation
  290. , min_elements_per_block
  291. , hdr_size
  292. , hdr_offset_size
  293. , overhead_percent
  294. , false>
  295. {
  296. typedef candidate_power_of_2_ct
  297. < initial_alignment
  298. , real_node_size
  299. , payload_per_allocation
  300. , min_elements_per_block
  301. , hdr_size
  302. , hdr_offset_size
  303. , overhead_percent
  304. , false> type;
  305. typedef candidate_power_of_2_ct_helper
  306. < initial_alignment
  307. , real_node_size
  308. , payload_per_allocation
  309. , min_elements_per_block
  310. , hdr_size
  311. , hdr_offset_size
  312. , overhead_percent> helper_t;
  313. BOOST_STATIC_CONSTEXPR std::size_t alignment = initial_alignment;
  314. BOOST_STATIC_CONSTEXPR std::size_t num_subblocks = helper_t::num_b_subblock+1;
  315. BOOST_STATIC_CONSTEXPR std::size_t real_num_node = helper_t::total_nodes;
  316. };
  317. /////////////////////////////////////////////
  318. //
  319. // candidate_power_of_2_rt
  320. //
  321. /////////////////////////////////////////////
  322. inline void candidate_power_of_2_rt ( std::size_t initial_alignment
  323. , std::size_t real_node_size
  324. , std::size_t payload_per_allocation
  325. , std::size_t min_elements_per_block
  326. , std::size_t hdr_size
  327. , std::size_t hdr_offset_size
  328. , std::size_t overhead_percent
  329. , std::size_t &alignment
  330. , std::size_t &num_subblocks
  331. , std::size_t &real_num_node)
  332. {
  333. bool overhead_satisfied = false;
  334. std::size_t num_b_subblock = 0;
  335. std::size_t total_nodes = 0;
  336. while(!overhead_satisfied)
  337. {
  338. std::size_t hdr_subblock_elements_alone = (initial_alignment - hdr_size - payload_per_allocation)/real_node_size;
  339. std::size_t hdr_subblock_elements_first = (initial_alignment - hdr_size - payload_per_allocation)/real_node_size;
  340. std::size_t elements_per_b_subblock_mid = (initial_alignment - hdr_offset_size)/real_node_size;
  341. std::size_t elements_per_b_subblock_end = (initial_alignment - hdr_offset_size - payload_per_allocation)/real_node_size;
  342. num_b_subblock =
  343. hdr_subblock_elements_alone >= min_elements_per_block
  344. ? 0
  345. : ( ((hdr_subblock_elements_first + elements_per_b_subblock_end) >= min_elements_per_block)
  346. ? 1
  347. : 2 + (min_elements_per_block - hdr_subblock_elements_first - elements_per_b_subblock_end - 1)/elements_per_b_subblock_mid
  348. )
  349. ;
  350. std::size_t num_b_subblock_mid = (num_b_subblock > 1) ? (num_b_subblock - 1) : 0;
  351. total_nodes = (num_b_subblock == 0)
  352. ? hdr_subblock_elements_alone
  353. : ( (num_b_subblock == 1)
  354. ? (hdr_subblock_elements_first + elements_per_b_subblock_end)
  355. : (hdr_subblock_elements_first + num_b_subblock_mid*elements_per_b_subblock_mid + elements_per_b_subblock_end)
  356. )
  357. ;
  358. std::size_t total_data = total_nodes*real_node_size;
  359. std::size_t total_size = initial_alignment*(num_b_subblock+1);
  360. overhead_satisfied = (total_size - total_data)*100/total_size < overhead_percent;
  361. initial_alignment = initial_alignment << std::size_t(!overhead_satisfied);
  362. }
  363. alignment = initial_alignment;
  364. num_subblocks = num_b_subblock+1;
  365. real_num_node = total_nodes;
  366. }
  367. /////////////////////////////////////////////
  368. //
  369. // private_adaptive_node_pool_impl_common
  370. //
  371. /////////////////////////////////////////////
  372. template< class SegmentManagerBase, unsigned int Flags>
  373. class private_adaptive_node_pool_impl_common
  374. {
  375. public:
  376. //!Segment manager typedef
  377. typedef SegmentManagerBase segment_manager_base_type;
  378. typedef typename SegmentManagerBase::multiallocation_chain multiallocation_chain;
  379. typedef typename SegmentManagerBase::size_type size_type;
  380. //Flags
  381. //align_only
  382. BOOST_STATIC_CONSTEXPR bool AlignOnly = (Flags & adaptive_pool_flag::align_only) != 0;
  383. typedef bool_<AlignOnly> IsAlignOnly;
  384. typedef true_ AlignOnlyTrue;
  385. typedef false_ AlignOnlyFalse;
  386. typedef typename SegmentManagerBase::void_pointer void_pointer;
  387. BOOST_STATIC_CONSTEXPR typename SegmentManagerBase::
  388. size_type PayloadPerAllocation = SegmentManagerBase::PayloadPerAllocation;
  389. typedef typename boost::intrusive::pointer_traits
  390. <void_pointer>::template rebind_pointer<segment_manager_base_type>::type segment_mngr_base_ptr_t;
  391. protected:
  392. typedef adaptive_pool_types
  393. <multiallocation_chain, void_pointer, size_type, Flags> adaptive_pool_types_t;
  394. typedef typename adaptive_pool_types_t::free_nodes_t free_nodes_t;
  395. typedef typename adaptive_pool_types_t::block_info_t block_info_t;
  396. typedef typename adaptive_pool_types_t::block_container_t block_container_t;
  397. typedef typename adaptive_pool_types_t::block_container_traits_t block_container_traits_t;
  398. typedef typename block_container_t::iterator block_iterator;
  399. typedef typename block_container_t::const_iterator const_block_iterator;
  400. typedef typename adaptive_pool_types_t::hdr_offset_holder hdr_offset_holder;
  401. typedef private_adaptive_node_pool_impl_common this_type;
  402. BOOST_STATIC_CONSTEXPR size_type MaxAlign = alignment_of<void_pointer>::value;
  403. BOOST_STATIC_CONSTEXPR size_type HdrSize = ((sizeof(block_info_t)-1)/MaxAlign+1)*MaxAlign;
  404. BOOST_STATIC_CONSTEXPR size_type HdrOffsetSize = ((sizeof(hdr_offset_holder)-1)/MaxAlign+1)*MaxAlign;
  405. segment_mngr_base_ptr_t mp_segment_mngr_base; //Segment manager
  406. block_container_t m_block_container; //Intrusive block list
  407. size_type m_totally_free_blocks; //Free blocks
  408. class block_destroyer;
  409. friend class block_destroyer;
  410. class block_destroyer
  411. {
  412. public:
  413. block_destroyer(const this_type *impl, multiallocation_chain &chain, const size_type num_subblocks, const size_type real_block_alignment, const size_type real_num_node)
  414. : mp_impl(impl), m_chain(chain), m_num_subblocks(num_subblocks), m_real_block_alignment(real_block_alignment), m_real_num_node(real_num_node)
  415. {}
  416. void operator()(typename block_container_t::pointer to_deallocate)
  417. { return this->do_destroy(to_deallocate, IsAlignOnly()); }
  418. private:
  419. void do_destroy(typename block_container_t::pointer to_deallocate, AlignOnlyTrue)
  420. {
  421. BOOST_ASSERT(to_deallocate->free_nodes.size() == m_real_num_node);
  422. m_chain.push_back(to_deallocate);
  423. }
  424. void do_destroy(typename block_container_t::pointer to_deallocate, AlignOnlyFalse)
  425. {
  426. BOOST_ASSERT(to_deallocate->free_nodes.size() == m_real_num_node);
  427. BOOST_ASSERT(0 == to_deallocate->hdr_offset);
  428. hdr_offset_holder *hdr_off_holder =
  429. mp_impl->priv_first_subblock_from_block(boost::movelib::to_raw_pointer(to_deallocate), m_num_subblocks, m_real_block_alignment);
  430. m_chain.push_back(hdr_off_holder);
  431. }
  432. const this_type *mp_impl;
  433. multiallocation_chain &m_chain;
  434. const size_type m_num_subblocks;
  435. const size_type m_real_block_alignment;
  436. const size_type m_real_num_node;
  437. };
  438. //This macro will activate invariant checking. Slow, but helpful for debugging the code.
  439. //#define BOOST_CONTAINER_ADAPTIVE_NODE_POOL_CHECK_INVARIANTS
  440. void priv_invariants(const size_type real_num_node, const size_type num_subblocks, const size_type real_block_alignment) const
  441. {
  442. (void)real_num_node; (void)num_subblocks; (void)real_block_alignment;
  443. #ifdef BOOST_CONTAINER_ADAPTIVE_NODE_POOL_CHECK_INVARIANTS
  444. //Check that the total totally free blocks are correct
  445. BOOST_ASSERT(m_block_container.size() >= m_totally_free_blocks);
  446. const const_block_iterator itend(m_block_container.cend());
  447. const const_block_iterator itbeg(m_block_container.cbegin());
  448. { //Try to do checks in a single iteration
  449. const_block_iterator it(itbeg);
  450. size_type total_free_nodes = 0;
  451. size_type total_free_blocks = 0u;
  452. for(; it != itend; ++it){
  453. if(it != itbeg){
  454. //Check order invariant
  455. const_block_iterator prev(it);
  456. --prev;
  457. BOOST_ASSERT(!(m_block_container.key_comp()(*it, *prev)));
  458. (void)prev; (void)it;
  459. }
  460. //free_nodes invariant
  461. const size_type free_nodes = it->free_nodes.size();
  462. BOOST_ASSERT(free_nodes <= real_num_node);
  463. BOOST_ASSERT(free_nodes != 0);
  464. //Acummulate total_free_nodes and total_free_blocks
  465. total_free_nodes += free_nodes;
  466. total_free_blocks += it->free_nodes.size() == real_num_node;
  467. if (!AlignOnly) {
  468. //Check that header offsets are correct
  469. hdr_offset_holder *hdr_off_holder = this->priv_first_subblock_from_block(const_cast<block_info_t *>(&*it), num_subblocks, real_block_alignment);
  470. for (size_type i = 0, max = num_subblocks; i < max; ++i) {
  471. const size_type offset = size_type(reinterpret_cast<char*>(const_cast<block_info_t *>(&*it)) - reinterpret_cast<char*>(hdr_off_holder));
  472. (void)offset;
  473. BOOST_ASSERT(hdr_off_holder->hdr_offset == offset);
  474. BOOST_ASSERT(0 == (reinterpret_cast<std::size_t>(hdr_off_holder) & (real_block_alignment - 1)));
  475. BOOST_ASSERT(0 == (hdr_off_holder->hdr_offset & (real_block_alignment - 1)));
  476. hdr_off_holder = move_detail::force_ptr<hdr_offset_holder *>(reinterpret_cast<char*>(hdr_off_holder) + real_block_alignment);
  477. }
  478. }
  479. }
  480. BOOST_ASSERT(total_free_blocks == m_totally_free_blocks);
  481. BOOST_ASSERT(total_free_nodes >= m_totally_free_blocks*real_num_node);
  482. }
  483. #endif
  484. }
  485. void priv_deallocate_free_blocks( const size_type max_free_blocks, const size_type real_num_node
  486. , const size_type num_subblocks, const size_type real_block_alignment)
  487. { //Trampoline function to ease inlining
  488. if(m_totally_free_blocks > max_free_blocks){
  489. this->priv_deallocate_free_blocks_impl(max_free_blocks, real_num_node, num_subblocks, real_block_alignment);
  490. }
  491. }
  492. hdr_offset_holder *priv_first_subblock_from_block(block_info_t *block, const size_type num_subblocks, const size_type real_block_alignment) const
  493. { return this->priv_first_subblock_from_block(block, num_subblocks, real_block_alignment, IsAlignOnly()); }
  494. hdr_offset_holder *priv_first_subblock_from_block(block_info_t *block, const size_type num_subblocks, const size_type real_block_alignment, AlignOnlyFalse) const
  495. {
  496. hdr_offset_holder *const hdr_off_holder = move_detail::force_ptr<hdr_offset_holder*>
  497. (reinterpret_cast<char*>(block) - (num_subblocks-1)*real_block_alignment);
  498. BOOST_ASSERT(hdr_off_holder->hdr_offset == size_type(reinterpret_cast<char*>(block) - reinterpret_cast<char*>(hdr_off_holder)));
  499. BOOST_ASSERT(0 == ((std::size_t)hdr_off_holder & (real_block_alignment - 1)));
  500. BOOST_ASSERT(0 == (hdr_off_holder->hdr_offset & (real_block_alignment - 1)));
  501. return hdr_off_holder;
  502. }
  503. hdr_offset_holder *priv_first_subblock_from_block(block_info_t *block, const size_type num_subblocks, const size_type real_block_alignment, AlignOnlyTrue) const
  504. {
  505. (void)num_subblocks; (void)real_block_alignment;
  506. return move_detail::force_ptr<hdr_offset_holder*>(block);
  507. }
  508. void priv_deallocate_free_blocks_impl( const size_type max_free_blocks, const size_type real_num_node
  509. , const size_type num_subblocks, const size_type real_block_alignment)
  510. {
  511. this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
  512. //Now check if we've reached the free nodes limit
  513. //and check if we have free blocks. If so, deallocate as much
  514. //as we can to stay below the limit
  515. multiallocation_chain chain;
  516. {
  517. if(Flags & adaptive_pool_flag::size_ordered){
  518. const_block_iterator it = m_block_container.cend();
  519. --it;
  520. size_type totally_free_blocks = m_totally_free_blocks;
  521. for( ; totally_free_blocks > max_free_blocks; --totally_free_blocks){
  522. BOOST_ASSERT(it->free_nodes.size() == real_num_node);
  523. void *addr = priv_first_subblock_from_block(const_cast<block_info_t*>(&*it), num_subblocks, real_block_alignment);
  524. --it;
  525. block_container_traits_t::erase_last(m_block_container);
  526. chain.push_front(void_pointer(addr));
  527. }
  528. }
  529. else{
  530. const_block_iterator it = m_block_container.cend();
  531. size_type totally_free_blocks = m_totally_free_blocks;
  532. while(totally_free_blocks > max_free_blocks){
  533. --it;
  534. if(it->free_nodes.size() == real_num_node){
  535. void *addr = priv_first_subblock_from_block(const_cast<block_info_t*>(&*it), num_subblocks, real_block_alignment);
  536. it = m_block_container.erase(it);
  537. chain.push_front(void_pointer(addr));
  538. --totally_free_blocks;
  539. }
  540. }
  541. }
  542. BOOST_ASSERT((m_totally_free_blocks - max_free_blocks) == chain.size());
  543. m_totally_free_blocks = max_free_blocks;
  544. }
  545. this->mp_segment_mngr_base->deallocate_many(chain);
  546. this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
  547. }
  548. void priv_fill_chain_remaining_to_block
  549. ( multiallocation_chain &chain, size_type target_elem_in_chain, block_info_t &c_info
  550. , char *mem_address, size_type max_node_in_mem
  551. , const size_type real_node_size)
  552. {
  553. BOOST_ASSERT(chain.size() <= target_elem_in_chain);
  554. //First add all possible nodes to the chain
  555. const size_type left = target_elem_in_chain - chain.size();
  556. const size_type add_to_chain = (max_node_in_mem < left) ? max_node_in_mem : left;
  557. char *free_mem_address = static_cast<char *>(boost::movelib::to_raw_pointer
  558. (chain.incorporate_after(chain.last(), void_pointer(mem_address), real_node_size, add_to_chain)));
  559. //Now store remaining nodes in the free list
  560. if(const size_type free = max_node_in_mem - add_to_chain){
  561. free_nodes_t & free_nodes = c_info.free_nodes;
  562. free_nodes.incorporate_after(free_nodes.last(), void_pointer(free_mem_address), real_node_size, free);
  563. }
  564. }
//!Allocates several blocks of nodes. Can throw.
//!AlignOnly overload: each block is one aligned allocation whose header
//!(block_info_t) sits at the block start. Nodes are appended to `chain`
//!until it holds `chain.size() + min_elements` elements; any leftover
//!nodes of the last block stay in that block's free list.
void priv_append_from_new_blocks( size_type min_elements, multiallocation_chain &chain
                                , const size_type max_free_blocks
                                , const size_type real_block_alignment, const size_type real_node_size
                                , const size_type real_num_node, const size_type num_subblocks
                                , AlignOnlyTrue)
{
   (void)num_subblocks;   //Unused in the AlignOnly layout (one subblock per block)
   //Caller contract: only invoked when no block with free nodes exists
   BOOST_ASSERT(m_block_container.empty());
   BOOST_ASSERT(min_elements > 0);
   //Number of blocks needed: ceil(min_elements / real_num_node)
   const size_type n = (min_elements - 1)/real_num_node + 1;
   //Request slightly less than the alignment so that allocation payload fits
   const size_type real_block_size = real_block_alignment - PayloadPerAllocation;
   const size_type target_elem_in_chain = chain.size() + min_elements;
   for(size_type i = 0; i != n; ++i){
      //We allocate a new NodeBlock and put it the last
      //element of the tree
      char *mem_address = static_cast<char*>
         (mp_segment_mngr_base->allocate_aligned(real_block_size, real_block_alignment));
      if(!mem_address){
         //In case of error, free memory deallocating all nodes (the new ones allocated
         //in this function plus previously stored nodes in chain).
         this->priv_deallocate_nodes(chain, max_free_blocks, real_num_node, num_subblocks, real_block_alignment);
         throw_bad_alloc();
      }
      //Construct the block header in place at the start of the block
      block_info_t &c_info = *new(mem_address, boost_container_new_t())block_info_t();
      mem_address += HdrSize;
      //Carve nodes: into `chain` until the target count is reached, the
      //remainder into c_info.free_nodes
      this->priv_fill_chain_remaining_to_block(chain, target_elem_in_chain, c_info, mem_address, real_num_node, real_node_size);
      const size_type free_nodes = c_info.free_nodes.size();
      if(free_nodes){
         const bool is_full = free_nodes == real_num_node;
         //Since n == ceil(min_elements/real_num_node), even the last block
         //donates at least one node to the chain, so it can't be totally free
         BOOST_ASSERT(free_nodes < real_num_node);
         m_totally_free_blocks += static_cast<size_type>(is_full);
         block_container_traits_t::insert_was_empty(m_block_container, c_info, is_full);
      }
   }
}
//!Allocates several blocks of nodes. Can throw.
//!Non-AlignOnly overload: each block is `num_subblocks` contiguous aligned
//!subblocks. The block header (block_info_t) lives in the LAST subblock;
//!every other subblock starts with a hdr_offset_holder marker pointing at it
//!so priv_block_from_node can find the header from any node address.
void priv_append_from_new_blocks( size_type min_elements, multiallocation_chain &chain
                                , const size_type max_free_blocks
                                , const size_type real_block_alignment, const size_type real_node_size
                                , const size_type real_num_node, const size_type num_subblocks
                                , AlignOnlyFalse)
{
   //Caller contract: only invoked when no block with free nodes exists
   BOOST_ASSERT(m_block_container.empty());
   BOOST_ASSERT(min_elements > 0);
   //Number of blocks needed: ceil(min_elements / real_num_node)
   const size_type n = (min_elements - 1)/real_num_node + 1;
   const size_type real_block_size = real_block_alignment*num_subblocks - PayloadPerAllocation;
   //Node capacity per subblock kind: middle subblocks lose only the offset
   //marker; the end (header) subblock layout also subtracts the allocation
   //payload; the header subblock hosts the full block_info_t header
   const size_type elements_per_subblock_mid = (real_block_alignment - HdrOffsetSize)/real_node_size;
   const size_type elements_per_subblock_end = (real_block_alignment - HdrOffsetSize - PayloadPerAllocation) / real_node_size;
   const size_type hdr_subblock_elements = (real_block_alignment - HdrSize - PayloadPerAllocation)/real_node_size;
   const size_type target_elem_in_chain = chain.size() + min_elements;
   for(size_type i = 0; i != n; ++i){
      //We allocate a new NodeBlock and put it the last
      //element of the tree
      char *mem_address = static_cast<char*>
         (mp_segment_mngr_base->allocate_aligned(real_block_size, real_block_alignment));
      if(!mem_address){
         //In case of error, free memory deallocating all nodes (the new ones allocated
         //in this function plus previously stored nodes in chain).
         this->priv_deallocate_nodes(chain, max_free_blocks, real_num_node, num_subblocks, real_block_alignment);
         throw_bad_alloc();
      }
      //First initialize header information on the last subblock
      char *hdr_addr = mem_address + real_block_alignment*(num_subblocks-1);
      block_info_t &c_info = *new(hdr_addr, boost_container_new_t())block_info_t();
      //Structural check: hdr_offset must be the first member of the header
      //so that the marker and the header overlap at offset zero
      BOOST_ASSERT(static_cast<void*>(&static_cast<hdr_offset_holder&>(c_info).hdr_offset) ==
                   static_cast<void*>(&c_info)); (void)c_info;
      for( size_type subblock = 0, maxsubblock = num_subblocks - 1
         ; subblock < maxsubblock
         ; ++subblock, mem_address += real_block_alignment){
         //Initialize header offset mark (distance from this subblock to the header)
         new(mem_address, boost_container_new_t()) hdr_offset_holder(size_type(hdr_addr - mem_address));
         const size_type elements_per_subblock = (subblock != (maxsubblock - 1)) ? elements_per_subblock_mid : elements_per_subblock_end;
         this->priv_fill_chain_remaining_to_block
            (chain, target_elem_in_chain, c_info, mem_address + HdrOffsetSize, elements_per_subblock, real_node_size);
      }
      //Finally carve the nodes hosted in the header subblock itself
      this->priv_fill_chain_remaining_to_block
         (chain, target_elem_in_chain, c_info, hdr_addr + HdrSize, hdr_subblock_elements, real_node_size);
      m_totally_free_blocks += static_cast<size_type>(c_info.free_nodes.size() == real_num_node);
      if (c_info.free_nodes.size())
         m_block_container.push_front(c_info);
   }
}
//!Allocates a single node. Can throw.
//!Takes the first free node of the first block in the container, or
//!allocates a fresh block when no free node exists.
void *priv_allocate_node( const size_type max_free_blocks, const size_type real_block_alignment, const size_type real_node_size
                        , const size_type real_num_node, const size_type num_subblocks)
{
   this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
   if(!m_block_container.empty()){
      //We take the first free node; the first block can't have an empty free list
      free_nodes_t &free_nodes = m_block_container.begin()->free_nodes;
      BOOST_ASSERT(!free_nodes.empty());
      const size_type free_nodes_count = free_nodes.size();
      void *first_node = boost::movelib::to_raw_pointer(free_nodes.pop_front());
      //A block with no free nodes left must leave the container
      if(free_nodes.empty()){
         block_container_traits_t::erase_first(m_block_container);
      }
      //If the block was totally free, it no longer is after taking a node
      m_totally_free_blocks -= static_cast<size_type>(free_nodes_count == real_num_node);
      this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
      return first_node;
   }
   else{
      //There are no free nodes: allocate a new block and take one node from it
      multiallocation_chain chain;
      this->priv_append_from_new_blocks
         (1, chain, max_free_blocks, real_block_alignment, real_node_size, real_num_node, num_subblocks, IsAlignOnly());
      void *node = boost::movelib::to_raw_pointer(chain.pop_front());
      this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
      return node;
   }
}
//!Allocates n nodes into `chain`. Can throw.
//!Drains free nodes block by block; when the container runs dry, allocates
//!new blocks for the whole remainder. On exception every node already in
//!`chain` is returned to the pool before rethrowing.
void priv_allocate_nodes( const size_type n, multiallocation_chain &chain
                        , const size_type max_free_blocks, const size_type real_block_alignment, const size_type real_node_size
                        , const size_type real_num_node, const size_type num_subblocks)
{
   size_type i = 0;
   BOOST_CONTAINER_TRY{
      this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
      while(i != n){
         //If there are no free nodes we allocate all needed blocks
         if (m_block_container.empty()){
            this->priv_append_from_new_blocks
               (n - i, chain, max_free_blocks, real_block_alignment, real_node_size, real_num_node, num_subblocks, IsAlignOnly());
            //After appending, at most one (partially used) block remains
            BOOST_ASSERT(m_block_container.empty() || (++m_block_container.cbegin() == m_block_container.cend()));
            BOOST_ASSERT(chain.size() == n);
            break;
         }
         free_nodes_t &free_nodes = m_block_container.begin()->free_nodes;
         const size_type free_nodes_count_before = free_nodes.size();
         //If this block was totally free, it won't be after we take nodes
         m_totally_free_blocks -= static_cast<size_type>(free_nodes_count_before == real_num_node);
         const size_type num_left = n-i;
         const size_type num_elems = (num_left < free_nodes_count_before) ? num_left : free_nodes_count_before;
         typedef typename free_nodes_t::iterator free_nodes_iterator;
         if(num_left < free_nodes_count_before){
            //Take only num_elems nodes: walk num_elems links to find the
            //last node of the run, unlink the run, splice it into `chain`
            const free_nodes_iterator it_bbeg(free_nodes.before_begin());
            free_nodes_iterator it_bend(it_bbeg);
            for(size_type j = 0; j != num_elems; ++j){
               ++it_bend;
            }
            free_nodes_iterator it_end = it_bend; ++it_end;
            free_nodes_iterator it_beg = it_bbeg; ++it_beg;
            free_nodes.erase_after(it_bbeg, it_end, num_elems);
            chain.incorporate_after(chain.last(), &*it_beg, &*it_bend, num_elems);
            //chain.splice_after(chain.last(), free_nodes, it_bbeg, it_bend, num_elems);
            BOOST_ASSERT(!free_nodes.empty());
         }
         else{
            //Take the whole free list and drop the now-exhausted block
            const free_nodes_iterator it_beg(free_nodes.begin()), it_bend(free_nodes.last());
            free_nodes.clear();
            chain.incorporate_after(chain.last(), &*it_beg, &*it_bend, num_elems);
            block_container_traits_t::erase_first(m_block_container);
         }
         i += num_elems;
      }
   }
   BOOST_CONTAINER_CATCH(...){
      //Roll back: return every node gathered so far, then rethrow
      this->priv_deallocate_nodes(chain, max_free_blocks, real_num_node, num_subblocks, real_block_alignment);
      this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
      BOOST_CONTAINER_RETHROW
   }
   BOOST_CONTAINER_CATCH_END
   this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
}
//!Deallocates a single node. Never throws.
void priv_deallocate_node( void *pElem
                         , const size_type max_free_blocks, const size_type real_num_node
                         , const size_type num_subblocks, const size_type real_block_alignment)
{
   this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
   //Locate the owning block header from the node address
   block_info_t &block_info = *this->priv_block_from_node(pElem, real_block_alignment);
   const size_type prev_free_nodes = block_info.free_nodes.size();
   //A block with a live node can't have all its nodes free already
   BOOST_ASSERT(block_info.free_nodes.size() < real_num_node);
   //Append the node to the block's free node list
   block_info.free_nodes.push_back(void_pointer(pElem));
   //Reposition the block in the container (insert if it had no free nodes before)
   this->priv_reinsert_block(block_info, prev_free_nodes == 0, real_num_node);
   //Trim totally free blocks down to max_free_blocks
   this->priv_deallocate_free_blocks(max_free_blocks, real_num_node, num_subblocks, real_block_alignment);
   this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
}
//!Deallocates a linked list of nodes. Never throws.
//!Returns every node in `nodes` to its owning block, splicing runs of
//!consecutive same-block nodes in one operation to exploit locality.
void priv_deallocate_nodes( multiallocation_chain &nodes
                          , const size_type max_free_blocks, const size_type real_num_node
                          , const size_type num_subblocks, const size_type real_block_alignment)
{
   this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
   //To take advantage of node locality, wait until two
   //nodes belong to different blocks. Only then reinsert
   //the block of the first node in the block tree.
   //Cache of the previous block
   block_info_t *prev_block_info = 0;
   //If block was empty before this call, it's not already
   //inserted in the block tree.
   bool prev_block_was_empty = false;
   typedef typename free_nodes_t::iterator free_nodes_iterator;
   {
      const free_nodes_iterator itbb(nodes.before_begin()), ite(nodes.end());
      free_nodes_iterator itf(nodes.begin()), itbf(itbb);
      //Starts at -1 so the first increment yields a count of 0
      size_type splice_node_count = size_type(-1);
      while(itf != ite){
         void *pElem = boost::movelib::to_raw_pointer(boost::movelib::iterator_to_raw_pointer(itf));
         block_info_t &block_info = *this->priv_block_from_node(pElem, real_block_alignment);
         BOOST_ASSERT(block_info.free_nodes.size() < real_num_node);
         ++splice_node_count;
         //If block change is detected calculate the cached block position in the tree
         if(&block_info != prev_block_info){
            if(prev_block_info){ //Make sure we skip the initial "dummy" cache
               //Splice the finished run [first, itbf] out of `nodes` into the
               //previous block's free list, then reposition that block
               free_nodes_iterator it(itbb); ++it;
               nodes.erase_after(itbb, itf, splice_node_count);
               prev_block_info->free_nodes.incorporate_after(prev_block_info->free_nodes.last(), &*it, &*itbf, splice_node_count);
               this->priv_reinsert_block(*prev_block_info, prev_block_was_empty, real_num_node);
               splice_node_count = 0;
            }
            //Update cache with new data
            prev_block_was_empty = block_info.free_nodes.empty();
            prev_block_info = &block_info;
         }
         itbf = itf;
         ++itf;
      }
   }
   if(prev_block_info){
      //The loop reinserts all blocks except the last one: flush the
      //remaining run (all nodes left belong to the cached block)
      const free_nodes_iterator itfirst(nodes.begin()), itlast(nodes.last());
      const size_type splice_node_count = nodes.size();
      nodes.clear();
      prev_block_info->free_nodes.incorporate_after(prev_block_info->free_nodes.last(), &*itfirst, &*itlast, splice_node_count);
      this->priv_reinsert_block(*prev_block_info, prev_block_was_empty, real_num_node);
      //Trim totally free blocks down to max_free_blocks
      this->priv_deallocate_free_blocks(max_free_blocks, real_num_node, num_subblocks, real_block_alignment);
   }
   this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
}
  795. void priv_reinsert_block(block_info_t &prev_block_info, const bool prev_block_was_empty, const size_type real_num_node)
  796. {
  797. //Cache the free nodes from the block
  798. const size_type this_block_free_nodes = prev_block_info.free_nodes.size();
  799. const bool is_full = this_block_free_nodes == real_num_node;
  800. //Update free block count
  801. m_totally_free_blocks += static_cast<size_type>(is_full);
  802. if(prev_block_was_empty){
  803. block_container_traits_t::insert_was_empty(m_block_container, prev_block_info, is_full);
  804. }
  805. else{
  806. block_container_traits_t::reinsert_was_used(m_block_container, prev_block_info, is_full);
  807. }
  808. }
//!Non-AlignOnly layout: mask the node address down to its subblock start,
//!read the hdr_offset marker stored there and jump to the block header.
block_info_t *priv_block_from_node(void *node, const size_type real_block_alignment, AlignOnlyFalse) const
{
   //Subblock start = node address rounded down to the block alignment
   hdr_offset_holder *hdr_off_holder =
      reinterpret_cast<hdr_offset_holder*>((std::size_t)node & size_type(~(real_block_alignment - 1)));
   BOOST_ASSERT(0 == ((std::size_t)hdr_off_holder & (real_block_alignment - 1)));
   //Offsets are whole subblocks, hence multiples of the alignment
   BOOST_ASSERT(0 == (hdr_off_holder->hdr_offset & (real_block_alignment - 1)));
   block_info_t *block = move_detail::force_ptr<block_info_t *>
      (reinterpret_cast<char*>(hdr_off_holder) + hdr_off_holder->hdr_offset);
   //The header subblock marks itself with offset zero
   BOOST_ASSERT(block->hdr_offset == 0);
   return block;
}
  820. block_info_t *priv_block_from_node(void *node, const size_type real_block_alignment, AlignOnlyTrue) const
  821. {
  822. return (block_info_t *)((std::size_t)node & std::size_t(~(real_block_alignment - 1)));
  823. }
//!Dispatches to the AlignOnly/non-AlignOnly overload chosen at compile time
block_info_t *priv_block_from_node(void *node, const size_type real_block_alignment) const
{  return this->priv_block_from_node(node, real_block_alignment, IsAlignOnly());  }
//!Deallocates all used memory. Never throws.
//!Precondition (asserted in debug builds): every remaining block is totally
//!free, i.e. no outstanding node allocations — anything else is a leak.
void priv_clear(const size_type num_subblocks, const size_type real_block_alignment, const size_type real_num_node)
{
   #ifndef NDEBUG
   block_iterator it = m_block_container.begin();
   block_iterator itend = m_block_container.end();
   size_type n_free_nodes = 0;   //despite the name, this counts totally free *blocks*
   for(; it != itend; ++it){
      //Check for memory leak
      BOOST_ASSERT(it->free_nodes.size() == real_num_node);
      ++n_free_nodes;
   }
   BOOST_ASSERT(n_free_nodes == m_totally_free_blocks);
   #endif
   //Check for memory leaks
   this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
   //Collect every block's memory into a chain and return it to the
   //segment manager in a single bulk deallocation
   multiallocation_chain chain;
   m_block_container.clear_and_dispose(block_destroyer(this, chain, num_subblocks, real_block_alignment, real_num_node));
   this->mp_segment_mngr_base->deallocate_many(chain);
   m_totally_free_blocks = 0;
   this->priv_invariants(real_num_node, num_subblocks, real_block_alignment);
}
  848. public:
//!Constructor from the general-purpose segment manager. Never throws.
//!Starts with an empty block container and no totally free blocks.
private_adaptive_node_pool_impl_common(segment_manager_base_type *segment_mngr_base)
   //General purpose allocator
   : mp_segment_mngr_base(segment_mngr_base)
   , m_block_container()
   , m_totally_free_blocks(0)
{}
  855. size_type num_free_nodes()
  856. {
  857. typedef typename block_container_t::const_iterator citerator;
  858. size_type count = 0;
  859. citerator it (m_block_container.begin()), itend(m_block_container.end());
  860. for(; it != itend; ++it){
  861. count += it->free_nodes.size();
  862. }
  863. return count;
  864. }
  865. void swap(private_adaptive_node_pool_impl_common &other)
  866. {
  867. std::swap(mp_segment_mngr_base, other.mp_segment_mngr_base);
  868. std::swap(m_totally_free_blocks, other.m_totally_free_blocks);
  869. m_block_container.swap(other.m_block_container);
  870. }
//!Returns the segment manager as a raw pointer (the stored pointer may be
//!a fancy pointer, hence to_raw_pointer). Never throws.
segment_manager_base_type* get_segment_manager_base()const
{  return boost::movelib::to_raw_pointer(mp_segment_mngr_base);  }
  874. };
//!Compile-time calculation of the block alignment and node capacity.
//!Primary template: used when AlignOnly is true (a specialization below
//!handles the false case). The block is one power-of-two-aligned chunk
//!holding the header followed by the nodes.
template< class SizeType
        , std::size_t HdrSize
        , std::size_t PayloadPerAllocation
        , std::size_t RealNodeSize
        , std::size_t NodesPerBlock
        , std::size_t HdrOffsetSize
        , std::size_t OverheadPercent
        , bool AlignOnly>
struct calculate_alignment_ct
{
   //Smallest power of two able to hold the header plus all requested nodes
   BOOST_STATIC_CONSTEXPR std::size_t alignment = upper_power_of_2_ct<SizeType, HdrSize + RealNodeSize*NodesPerBlock>::value;
   //AlignOnly blocks have no subblock subdivision
   BOOST_STATIC_CONSTEXPR std::size_t num_subblocks = 0;
   //Real capacity after subtracting allocator payload and header
   BOOST_STATIC_CONSTEXPR std::size_t real_num_node = (alignment - PayloadPerAllocation - HdrSize)/RealNodeSize;
};
//!Specialization for AlignOnly == false: delegates the search for a
//!suitable alignment/subblock layout to candidate_power_of_2_ct
//!(presumably bounding waste by OverheadPercent — see that metafunction),
//!then re-exports its results.
template< class SizeType
        , std::size_t HdrSize
        , std::size_t PayloadPerAllocation
        , std::size_t RealNodeSize
        , std::size_t NodesPerBlock
        , std::size_t HdrOffsetSize
        , std::size_t OverheadPercent>
struct calculate_alignment_ct
   < SizeType
   , HdrSize
   , PayloadPerAllocation
   , RealNodeSize
   , NodesPerBlock
   , HdrOffsetSize
   , OverheadPercent
   , false>
{
   //Start the candidate search at the smallest power of two that can hold
   //header + payload + one node
   typedef typename candidate_power_of_2_ct
      < upper_power_of_2_ct<SizeType, HdrSize + PayloadPerAllocation + RealNodeSize>::value
      , RealNodeSize
      , PayloadPerAllocation
      , NodesPerBlock
      , HdrSize
      , HdrOffsetSize
      , OverheadPercent
      >::type type;
   BOOST_STATIC_CONSTEXPR std::size_t alignment     = type::alignment;
   BOOST_STATIC_CONSTEXPR std::size_t num_subblocks = type::num_subblocks;
   BOOST_STATIC_CONSTEXPR std::size_t real_num_node = type::real_num_node;
};
  919. /////////////////////////////////////////////
  920. //
  921. // private_adaptive_node_pool_impl_ct
  922. //
  923. /////////////////////////////////////////////
  924. template< class SegmentManagerBase
  925. , std::size_t MaxFreeBlocks
  926. , std::size_t NodeSize
  927. , std::size_t NodesPerBlock
  928. , std::size_t OverheadPercent
  929. , unsigned int Flags>
  930. class private_adaptive_node_pool_impl_ct
  931. : public private_adaptive_node_pool_impl_common<SegmentManagerBase, Flags>
  932. {
  933. typedef private_adaptive_node_pool_impl_common<SegmentManagerBase, Flags> base_t;
  934. //Non-copyable
  935. private_adaptive_node_pool_impl_ct();
  936. private_adaptive_node_pool_impl_ct(const private_adaptive_node_pool_impl_ct &);
  937. private_adaptive_node_pool_impl_ct &operator=(const private_adaptive_node_pool_impl_ct &);
  938. public:
  939. typedef typename base_t::void_pointer void_pointer;
  940. typedef typename base_t::size_type size_type;
  941. typedef typename base_t::multiallocation_chain multiallocation_chain;
  942. typedef typename base_t::segment_manager_base_type segment_manager_base_type;
  943. BOOST_STATIC_CONSTEXPR typename base_t::size_type PayloadPerAllocation = base_t::PayloadPerAllocation;
  944. //align_only
  945. BOOST_STATIC_CONSTEXPR bool AlignOnly = base_t::AlignOnly;
  946. private:
  947. BOOST_STATIC_CONSTEXPR size_type MaxAlign = base_t::MaxAlign;
  948. BOOST_STATIC_CONSTEXPR size_type HdrSize = base_t::HdrSize;
  949. BOOST_STATIC_CONSTEXPR size_type HdrOffsetSize = base_t::HdrOffsetSize;
  950. BOOST_STATIC_CONSTEXPR size_type RealNodeSize = lcm_ct<NodeSize, alignment_of<void_pointer>::value>::value;
  951. typedef calculate_alignment_ct
  952. < size_type, HdrSize, PayloadPerAllocation
  953. , RealNodeSize, NodesPerBlock, HdrOffsetSize, OverheadPercent, AlignOnly> data_t;
  954. //Round the size to a power of two value.
  955. //This is the total memory size (including payload) that we want to
  956. //allocate from the general-purpose allocator
  957. BOOST_STATIC_CONSTEXPR size_type NumSubBlocks = data_t::num_subblocks;
  958. BOOST_STATIC_CONSTEXPR size_type RealNumNode = data_t::real_num_node;
  959. BOOST_STATIC_CONSTEXPR size_type RealBlockAlignment = data_t::alignment;
  960. public:
  961. //!Constructor from a segment manager. Never throws
  962. private_adaptive_node_pool_impl_ct(typename base_t::segment_manager_base_type *segment_mngr_base)
  963. //General purpose allocator
  964. : base_t(segment_mngr_base)
  965. {}
  966. //!Destructor. Deallocates all allocated blocks. Never throws
  967. ~private_adaptive_node_pool_impl_ct()
  968. { this->priv_clear(NumSubBlocks, data_t::alignment, RealNumNode); }
  969. size_type get_real_num_node() const
  970. { return RealNumNode; }
  971. //!Allocates array of count elements. Can throw
  972. void *allocate_node()
  973. {
  974. return this->priv_allocate_node
  975. (MaxFreeBlocks, data_t::alignment, RealNodeSize, RealNumNode, NumSubBlocks);
  976. }
  977. //!Allocates n nodes.
  978. //!Can throw
  979. void allocate_nodes(const size_type n, multiallocation_chain &chain)
  980. {
  981. this->priv_allocate_nodes
  982. (n, chain, MaxFreeBlocks, data_t::alignment, RealNodeSize, RealNumNode, NumSubBlocks);
  983. }
  984. //!Deallocates an array pointed by ptr. Never throws
  985. void deallocate_node(void *pElem)
  986. {
  987. this->priv_deallocate_node(pElem, MaxFreeBlocks, RealNumNode, NumSubBlocks, RealBlockAlignment);
  988. }
  989. //!Deallocates a linked list of nodes. Never throws
  990. void deallocate_nodes(multiallocation_chain &nodes)
  991. {
  992. this->priv_deallocate_nodes(nodes, MaxFreeBlocks, RealNumNode, NumSubBlocks, data_t::alignment);
  993. }
  994. void deallocate_free_blocks()
  995. { this->priv_deallocate_free_blocks(0, RealNumNode, NumSubBlocks, data_t::alignment); }
  996. //Deprecated, use deallocate_free_blocks
  997. void deallocate_free_chunks()
  998. { this->priv_deallocate_free_blocks(0, RealNumNode, NumSubBlocks, data_t::alignment); }
  999. };
  1000. /////////////////////////////////////////////
  1001. //
  1002. // private_adaptive_node_pool_impl_rt
  1003. //
  1004. /////////////////////////////////////////////
//!Runtime-configured pool geometry. The first two members are fixed at
//!construction; the remaining three are filled in by the constructor of
//!private_adaptive_node_pool_impl_rt, which derives from this struct.
template<class SizeType>
struct private_adaptive_node_pool_impl_rt_data
{
   typedef SizeType size_type;

   private_adaptive_node_pool_impl_rt_data(size_type max_free_blocks, size_type real_node_size)
      : m_max_free_blocks(max_free_blocks), m_real_node_size(real_node_size)
      //Value-initialized (zero); computed later by the derived class' constructor
      , m_real_block_alignment(), m_num_subblocks(), m_real_num_node()
   {}

   //Maximum number of totally free blocks kept cached before returning
   //memory to the segment manager
   const size_type m_max_free_blocks;
   //Node size rounded up for pointer alignment
   const size_type m_real_node_size;
   //Round the size to a power of two value.
   //This is the total memory size (including payload) that we want to
   //allocate from the general-purpose allocator
   size_type m_real_block_alignment;
   size_type m_num_subblocks;
   //This is the real number of nodes per block
   size_type m_real_num_node;
};
//!Adaptive node pool whose geometry (node size, block alignment, subblock
//!count) is computed at runtime from constructor arguments. The geometry
//!lives in the privately inherited data struct so it is initialized before
//!the pool base class.
template<class SegmentManagerBase, unsigned int Flags>
class private_adaptive_node_pool_impl_rt
   : private private_adaptive_node_pool_impl_rt_data<typename SegmentManagerBase::size_type>
   , public private_adaptive_node_pool_impl_common<SegmentManagerBase, Flags>
{
   typedef private_adaptive_node_pool_impl_common<SegmentManagerBase, Flags> impl_t;
   typedef private_adaptive_node_pool_impl_rt_data<typename SegmentManagerBase::size_type> data_t;

   //Non-copyable
   private_adaptive_node_pool_impl_rt();
   private_adaptive_node_pool_impl_rt(const private_adaptive_node_pool_impl_rt &);
   private_adaptive_node_pool_impl_rt &operator=(const private_adaptive_node_pool_impl_rt &);

   protected:
   typedef typename impl_t::void_pointer          void_pointer;
   typedef typename impl_t::size_type             size_type;
   typedef typename impl_t::multiallocation_chain multiallocation_chain;
   BOOST_STATIC_CONSTEXPR typename impl_t::size_type PayloadPerAllocation = impl_t::PayloadPerAllocation;

   //Flags
   //align_only
   BOOST_STATIC_CONSTEXPR bool AlignOnly = impl_t::AlignOnly;

   BOOST_STATIC_CONSTEXPR size_type HdrSize = impl_t::HdrSize;
   BOOST_STATIC_CONSTEXPR size_type HdrOffsetSize = impl_t::HdrOffsetSize;

   public:
   //!Segment manager typedef
   typedef SegmentManagerBase segment_manager_base_type;

   //!Constructor from a segment manager. Never throws
   private_adaptive_node_pool_impl_rt
      ( segment_manager_base_type *segment_mngr_base
      , size_type node_size
      , size_type nodes_per_block
      , size_type max_free_blocks
      , unsigned char overhead_percent
      )
      //Round node size up so pointers stored inside free nodes stay aligned
      : data_t(max_free_blocks, lcm(node_size, size_type(alignment_of<void_pointer>::value)))
      , impl_t(segment_mngr_base)
   {
      if(AlignOnly){
         //Single-subblock layout: the alignment must fit header + all nodes
         this->m_real_block_alignment = upper_power_of_2(HdrSize + this->m_real_node_size*nodes_per_block);
         this->m_real_num_node = (this->m_real_block_alignment - PayloadPerAllocation - HdrSize)/this->m_real_node_size;
      }
      else{
         //Multi-subblock layout: candidate_power_of_2_rt searches a suitable
         //alignment/subblock count (presumably bounding waste by
         //overhead_percent) and fills the last three by-reference arguments
         candidate_power_of_2_rt ( upper_power_of_2(HdrSize + PayloadPerAllocation + this->m_real_node_size)
                                 , this->m_real_node_size
                                 , PayloadPerAllocation
                                 , nodes_per_block
                                 , HdrSize
                                 , HdrOffsetSize
                                 , overhead_percent
                                 , this->m_real_block_alignment
                                 , this->m_num_subblocks
                                 , this->m_real_num_node);
      }
   }

   //!Destructor. Deallocates all allocated blocks. Never throws
   ~private_adaptive_node_pool_impl_rt()
   {  this->priv_clear(this->m_num_subblocks, this->m_real_block_alignment, this->m_real_num_node);  }

   //!Returns the real number of nodes carved out of each block
   size_type get_real_num_node() const
   {  return this->m_real_num_node;  }

   //!Allocates a single node. Can throw
   void *allocate_node()
   {
      return this->priv_allocate_node
         (this->m_max_free_blocks, this->m_real_block_alignment, this->m_real_node_size, this->m_real_num_node, this->m_num_subblocks);
   }

   //!Allocates n nodes.
   //!Can throw
   void allocate_nodes(const size_type n, multiallocation_chain &chain)
   {
      this->priv_allocate_nodes
         (n, chain, this->m_max_free_blocks, this->m_real_block_alignment, this->m_real_node_size, this->m_real_num_node, this->m_num_subblocks);
   }

   //!Deallocates a single node. Never throws
   void deallocate_node(void *pElem)
   {
      this->priv_deallocate_node(pElem, this->m_max_free_blocks, this->m_real_num_node, this->m_num_subblocks, this->m_real_block_alignment);
   }

   //!Deallocates a linked list of nodes. Never throws
   void deallocate_nodes(multiallocation_chain &nodes)
   {
      this->priv_deallocate_nodes(nodes, this->m_max_free_blocks, this->m_real_num_node, this->m_num_subblocks, this->m_real_block_alignment);
   }

   //!Releases every totally free block back to the segment manager
   //!(a zero limit keeps none)
   void deallocate_free_blocks()
   {  this->priv_deallocate_free_blocks(0, this->m_real_num_node, this->m_num_subblocks, this->m_real_block_alignment);  }

   //Deprecated, use deallocate_free_blocks
   void deallocate_free_chunks()
   {  this->priv_deallocate_free_blocks(0, this->m_real_num_node, this->m_num_subblocks, this->m_real_block_alignment);  }
};
  1109. } //namespace dtl {
  1110. } //namespace container {
  1111. } //namespace boost {
  1112. #include <boost/container/detail/config_end.hpp>
  1113. #endif //#ifndef BOOST_CONTAINER_DETAIL_ADAPTIVE_NODE_POOL_IMPL_HPP