#ifndef STXXL_STACK_HEADER
#define STXXL_STACK_HEADER

#include <stack>
#include <vector>

#include <stxxl/bits/mng/mng.h>
#include <stxxl/bits/common/simple_vector.h>
#include <stxxl/bits/common/tmeta.h>
#include <stxxl/bits/mng/write_pool.h>
#include <stxxl/bits/mng/prefetch_pool.h>

__STXXL_BEGIN_NAMESPACE
//! \brief Bundles the template parameters of the external stack classes into one configuration type.
template <class ValTp,
          unsigned BlocksPerPage = 4,
          unsigned BlkSz = STXXL_DEFAULT_BLOCK_SIZE(ValTp),
          class AllocStr = STXXL_DEFAULT_ALLOC_STRATEGY,
          class SzTp = stxxl::int64>
struct stack_config_generator
{
    typedef ValTp value_type;
    enum { blocks_per_page = BlocksPerPage };
    typedef AllocStr alloc_strategy;
    enum { block_size = BlkSz };
    typedef SzTp size_type;
};
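// Minimal usage sketch (illustrative only, not part of the original source):
// the generator merely bundles the parameters; the stack classes below read
// blocks_per_page, block_size etc. from the resulting config type.
//
//     typedef stxxl::stack_config_generator<int, 4, 2 * 1024 * 1024> int_cfg;
//     stxxl::normal_stack<int_cfg> s;      // normal_stack is declared below
//     s.push(42);
//     assert(s.top() == 42);
//     s.pop();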
//! \brief External stack container, conservative implementation: keeps a
//! two-page cache of blocks in internal memory.
template <class Config_>
class normal_stack
{
public:
    typedef Config_ cfg;
    typedef typename cfg::value_type value_type;
    typedef typename cfg::alloc_strategy alloc_strategy;
    typedef typename cfg::size_type size_type;
    enum {
        blocks_per_page = cfg::blocks_per_page,
        block_size = cfg::block_size
    };

private:
    size_type size_;
    unsigned_type cache_offset;
    value_type * current_element;
    simple_vector<block_type> cache;
    typename simple_vector<block_type>::iterator front_page;
    typename simple_vector<block_type>::iterator back_page;
    std::vector<bid_type> bids;
    alloc_strategy alloc_strategy_;
public:
    normal_stack() :
        size_(0),
        cache_offset(0),
        current_element(NULL),
        cache(blocks_per_page * 2),
        front_page(cache.begin() + blocks_per_page),
        back_page(cache.begin())
    {
        bids.reserve(blocks_per_page);
    }

    void swap(normal_stack & obj)
    {
        std::swap(size_, obj.size_);
        std::swap(cache_offset, obj.cache_offset);
        std::swap(current_element, obj.current_element);
        std::swap(cache, obj.cache);
        std::swap(front_page, obj.front_page);
        std::swap(back_page, obj.back_page);
        std::swap(bids, obj.bids);
        std::swap(alloc_strategy_, obj.alloc_strategy_);
    }
    //! \brief Construction from another stack type that provides size(), top() and pop().
    template <class stack_type>
    normal_stack(const stack_type & stack_) :
        size_(0),
        cache_offset(0),
        current_element(NULL),
        cache(blocks_per_page * 2),
        front_page(cache.begin() + blocks_per_page),
        back_page(cache.begin())
    {
        bids.reserve(blocks_per_page);

        stack_type stack_copy = stack_;
        const size_type sz = stack_copy.size();
        std::vector<value_type> tmp(sz);

        // pull the elements off the copy, restoring bottom-to-top order
        for (size_type i = 0; i < sz; ++i)
        {
            tmp[sz - i - 1] = stack_copy.top();
            stack_copy.pop();
        }
        for (size_type i = 0; i < sz; ++i)
            this->push(tmp[i]);
    }

    virtual ~normal_stack()
    {
        STXXL_VERBOSE(STXXL_PRETTY_FUNCTION_NAME);
        block_manager::get_instance()->delete_blocks(bids.begin(), bids.end());
    }
    size_type size() const
    {
        return size_;
    }
    value_type & top()
    {
        return (*current_element);
    }
    const value_type & top() const
    {
        return (*current_element);
    }
    void push(const value_type & val)
    {
        assert(cache_offset <= 2 * blocks_per_page * block_type::size);

        if (cache_offset == 2 * blocks_per_page * block_type::size)  // both cache pages are full
        {
            STXXL_VERBOSE2("growing, size: " << size_);

            // allocate blocks_per_page new blocks and write out the older (back) page
            bids.resize(bids.size() + blocks_per_page);
            typename std::vector<bid_type>::iterator cur_bid = bids.end() - blocks_per_page;
            block_manager::get_instance()->new_blocks( /* ... */ );

            simple_vector<request_ptr> requests(blocks_per_page);
            for (int i = 0; i < blocks_per_page; ++i, ++cur_bid)
                requests[i] = (back_page + i)->write(*cur_bid);

            std::swap(back_page, front_page);

            bids.reserve(bids.size() + blocks_per_page);

            cache_offset = blocks_per_page * block_type::size + 1;
            current_element = &((*front_page)[0]);
            ++size_;

            wait_all(requests.begin(), blocks_per_page);

            *current_element = val;
            return;
        }

        current_element = element(cache_offset);
        *current_element = val;
        ++cache_offset;
        ++size_;
    }
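    // Rough sizing sketch (illustrative, not from the original source): assuming
    // blocks_per_page = 4, a 2 MiB block size and 4-byte elements, one block
    // holds about 2 MiB / 4 B = 524,288 elements, so a page holds roughly
    // 4 * 524,288 = 2,097,152 elements.  A page write (about 8 MiB) therefore
    // happens only about once per 2.1 million pushes, which is what keeps the
    // amortized I/O cost per push small.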
    void pop()
    {
        assert(cache_offset <= 2 * blocks_per_page * block_type::size);
        assert(cache_offset > 0);

        if (cache_offset == 1 && bids.size() >= blocks_per_page)  // the cache is about to run empty
        {
            STXXL_VERBOSE2("shrinking, size: " << size_);

            // read the topmost on-disk page back into the spare page buffers
            simple_vector<request_ptr> requests(blocks_per_page);
            typename std::vector<bid_type>::const_iterator cur_bid = bids.end();
            for (int i = blocks_per_page - 1; i >= 0; --i)
                requests[i] = (front_page + i)->read(*(--cur_bid));

            std::swap(front_page, back_page);

            cache_offset = blocks_per_page * block_type::size;
            --size_;
            current_element = &((*(back_page + (blocks_per_page - 1)))[block_type::size - 1]);

            wait_all(requests.begin(), blocks_per_page);

            block_manager::get_instance()->delete_blocks(bids.end() - blocks_per_page, bids.end());
            bids.resize(bids.size() - blocks_per_page);
            return;
        }

        --size_;
        current_element = element((--cache_offset) - 1);
    }

private:
    // maps a logical offset within the two cached pages to the element's address
    value_type * element(unsigned_type offset)
    {
        if (offset < blocks_per_page * block_type::size)
            return &((*(back_page + offset / block_type::size))[offset % block_type::size]);
        unsigned_type unbiased_offset = offset - blocks_per_page * block_type::size;
        return &((*(front_page + unbiased_offset / block_type::size))[unbiased_offset % block_type::size]);
    }
};
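// Illustrative sketch (not from the original source): normal_stack can also be
// built from any stack-like type that offers size(), top() and pop(), e.g. a
// plain std::stack, via the templated constructor above.
//
//     std::stack<int> tmp;
//     tmp.push(1);
//     tmp.push(2);
//     stxxl::normal_stack<int_cfg> ext(tmp);   // int_cfg as in the earlier sketch
//     assert(ext.size() == 2 && ext.top() == 2);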
//! \brief External stack implementation that overlaps I/O with computation;
//! efficient when the stack first mostly grows and later mostly shrinks.
template <class Config_>
class grow_shrink_stack
{
public:
    typedef Config_ cfg;
    typedef typename cfg::value_type value_type;
    typedef typename cfg::alloc_strategy alloc_strategy;
    typedef typename cfg::size_type size_type;
    enum {
        blocks_per_page = cfg::blocks_per_page,
        block_size = cfg::block_size,
    };

private:
    size_type size_;
    unsigned_type cache_offset;
    value_type * current_element;
    simple_vector<block_type> cache;
    typename simple_vector<block_type>::iterator cache_buffers;
    typename simple_vector<block_type>::iterator overlap_buffers;
    simple_vector<request_ptr> requests;
    std::vector<bid_type> bids;
    alloc_strategy alloc_strategy_;

public:
    grow_shrink_stack() :
        size_(0),
        cache_offset(0),
        current_element(NULL),
        cache(blocks_per_page * 2),
        cache_buffers(cache.begin()),
        overlap_buffers(cache.begin() + blocks_per_page),
        requests(blocks_per_page)
    {
        bids.reserve(blocks_per_page);
    }
    void swap(grow_shrink_stack & obj)
    {
        std::swap(size_, obj.size_);
        std::swap(cache_offset, obj.cache_offset);
        std::swap(current_element, obj.current_element);
        std::swap(cache, obj.cache);
        std::swap(cache_buffers, obj.cache_buffers);
        std::swap(overlap_buffers, obj.overlap_buffers);
        std::swap(requests, obj.requests);
        std::swap(bids, obj.bids);
        std::swap(alloc_strategy_, obj.alloc_strategy_);
    }
    //! \brief Construction from another stack type that provides size(), top() and pop().
    template <class stack_type>
    grow_shrink_stack(const stack_type & stack_) :
        size_(0),
        cache_offset(0),
        current_element(NULL),
        cache(blocks_per_page * 2),
        cache_buffers(cache.begin()),
        overlap_buffers(cache.begin() + blocks_per_page),
        requests(blocks_per_page)
    {
        bids.reserve(blocks_per_page);

        stack_type stack_copy = stack_;
        const size_type sz = stack_copy.size();
        std::vector<value_type> tmp(sz);

        for (size_type i = 0; i < sz; ++i)
        {
            tmp[sz - i - 1] = stack_copy.top();
            stack_copy.pop();
        }
        for (size_type i = 0; i < sz; ++i)
            this->push(tmp[i]);
    }

    virtual ~grow_shrink_stack()
    {
        STXXL_VERBOSE(STXXL_PRETTY_FUNCTION_NAME);
        try
        {
            // wait for any outstanding write requests before freeing the blocks
            if (requests[0].get())
                wait_all(requests.begin(), blocks_per_page);
        }
        catch (const io_error & ex)
        { }
        block_manager::get_instance()->delete_blocks(bids.begin(), bids.end());
    }
    size_type size() const
    {
        return size_;
    }
    value_type & top()
    {
        return (*current_element);
    }
    const value_type & top() const
    {
        return (*current_element);
    }
    void push(const value_type & val)
    {
        assert(cache_offset <= blocks_per_page * block_type::size);

        if (cache_offset == blocks_per_page * block_type::size)  // the cache page is full
        {
            STXXL_VERBOSE2("growing, size: " << size_);

            bids.resize(bids.size() + blocks_per_page);
            typename std::vector<bid_type>::iterator cur_bid = bids.end() - blocks_per_page;
            block_manager::get_instance()->new_blocks( /* ... */ );

            // write the full page in the background, waiting only for requests
            // that still occupy the buffer about to be reused
            for (int i = 0; i < blocks_per_page; ++i, ++cur_bid)
            {
                if (requests[i].get())
                    requests[i]->wait();
                requests[i] = (cache_buffers + i)->write(*cur_bid);
            }

            std::swap(cache_buffers, overlap_buffers);

            bids.reserve(bids.size() + blocks_per_page);

            cache_offset = 0;
            current_element = &((*cache_buffers)[0]);
            ++size_;
            *current_element = val;
            ++cache_offset;
            return;
        }

        current_element = &((*(cache_buffers + cache_offset / block_type::size))[cache_offset % block_type::size]);
        *current_element = val;
        ++cache_offset;
        ++size_;
    }
    void pop()
    {
        assert(cache_offset <= blocks_per_page * block_type::size);
        assert(cache_offset > 0);

        if (cache_offset == 1 && bids.size() >= blocks_per_page)  // the cache page is about to run empty
        {
            STXXL_VERBOSE2("shrinking, size: " << size_);

            // wait for outstanding I/O on the overlap buffers; after the swap
            // they hold the topmost page
            if (requests[0].get())
                wait_all(requests.begin(), blocks_per_page);

            std::swap(cache_buffers, overlap_buffers);

            if (bids.size() > blocks_per_page)
            {
                STXXL_VERBOSE2("prefetching, size: " << size_);
                typename std::vector<bid_type>::const_iterator cur_bid = bids.end() - blocks_per_page;
                for (int i = blocks_per_page - 1; i >= 0; --i)
                    requests[i] = (overlap_buffers + i)->read(*(--cur_bid));
            }

            block_manager::get_instance()->delete_blocks(bids.end() - blocks_per_page, bids.end());
            bids.resize(bids.size() - blocks_per_page);

            cache_offset = blocks_per_page * block_type::size;
            --size_;
            current_element = &((*(cache_buffers + (blocks_per_page - 1)))[block_type::size - 1]);
            return;
        }

        --size_;
        unsigned_type cur_offset = (--cache_offset) - 1;
        current_element = &((*(cache_buffers + cur_offset / block_type::size))[cur_offset % block_type::size]);
    }
};
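// Illustrative sketch (not from the original source): grow_shrink_stack has the
// same interface as normal_stack but overlaps block I/O with computation, so it
// pays off when the stack first mostly grows and later mostly shrinks, e.g. when
// the results of one phase are consumed in reverse order by the next phase.
//
//     typedef stxxl::stack_config_generator<int, 4, 2 * 1024 * 1024> int_cfg;
//     stxxl::grow_shrink_stack<int_cfg> s;
//     for (int i = 0; i < 10000000; ++i)
//         s.push(i);              // full pages are written in the background
//     while (s.size() > 0)
//         s.pop();                // each page switch prefetches the next page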
//! \brief Pool-based implementation for the grow-shrink access pattern: uses a
//! prefetch pool and a write pool to overlap I/O with computation.
template <class Config_>
class grow_shrink_stack2
{
public:
    typedef Config_ cfg;
    typedef typename cfg::value_type value_type;
    typedef typename cfg::alloc_strategy alloc_strategy;
    typedef typename cfg::size_type size_type;
    enum {
        blocks_per_page = cfg::blocks_per_page,
        block_size = cfg::block_size,
    };

private:
    size_type size_;
    unsigned_type cache_offset;
    block_type * cache;
    value_type current;
    std::vector<bid_type> bids;
    alloc_strategy alloc_strategy_;
    unsigned_type pref_aggr;
    prefetch_pool<block_type> & p_pool;
    write_pool<block_type> & w_pool;

public:
    // the pool reference parameters are inferred from the p_pool / w_pool members above
    grow_shrink_stack2(prefetch_pool<block_type> & p_pool_,
                       write_pool<block_type> & w_pool_,
                       unsigned_type prefetch_aggressiveness = 0) :
        size_(0),
        cache_offset(0),
        cache(new block_type),
        pref_aggr(prefetch_aggressiveness),
        p_pool(p_pool_),
        w_pool(w_pool_)
    {
        STXXL_VERBOSE2("grow_shrink_stack2::grow_shrink_stack2(...)");
    }

    void swap(grow_shrink_stack2 & obj)
    {
        std::swap(size_, obj.size_);
        std::swap(cache_offset, obj.cache_offset);
        std::swap(cache, obj.cache);
        std::swap(current, obj.current);
        std::swap(bids, obj.bids);
        std::swap(alloc_strategy_, obj.alloc_strategy_);
        std::swap(pref_aggr, obj.pref_aggr);
    }
    virtual ~grow_shrink_stack2()
    {
        try
        {
            STXXL_VERBOSE2("grow_shrink_stack2::~grow_shrink_stack2()");
            const int_type bids_size = bids.size();
            const int_type last_pref = STXXL_MAX(int_type(bids_size) - int_type(pref_aggr), (int_type)0);
            // resolve prefetches that are still in flight for the topmost blocks
            for (int_type i = bids_size - 1; i >= last_pref; --i)
            {
                if (p_pool.in_prefetching(bids[i]))
                    p_pool.read(cache, bids[i])->wait();
            }
            // take back blocks that are still queued in the write pool
            typename std::vector<bid_type>::iterator cur = bids.begin();
            typename std::vector<bid_type>::const_iterator end = bids.end();
            for ( ; cur != end; ++cur)
            {
                block_type * b = w_pool.steal(*cur);
            }
        }
        catch (const io_error & ex)
        { }
        block_manager::get_instance()->delete_blocks(bids.begin(), bids.end());
    }

    size_type size() const { return size_; }
    void push(const value_type & val)
    {
        STXXL_VERBOSE3("grow_shrink_stack2::push(" << val << ")");
        assert(cache_offset <= block_type::size);

        if (cache_offset == block_type::size)  // the cache block is full
        {
            STXXL_VERBOSE2("grow_shrink_stack2::push(" << val << ") growing, size: " << size_);

            bids.resize(bids.size() + 1);
            typename std::vector<bid_type>::iterator cur_bid = bids.end() - 1;
            block_manager::get_instance()->new_blocks( /* ... */ );

            // hand the full block to the write pool and continue on a fresh one
            w_pool.write(cache, bids.back());
            cache = w_pool.steal();

            // resolve in-flight prefetches of blocks near the new top so that
            // the prefetch pool stays consistent
            const int_type bids_size = bids.size();
            const int_type last_pref = STXXL_MAX(int_type(bids_size) - int_type(pref_aggr) - 1, (int_type)0);
            for (int_type i = bids_size - 2; i >= last_pref; --i)
            {
                if (p_pool.in_prefetching(bids[i]))
                    p_pool.read(cache, bids[i])->wait();
            }
            cache_offset = 0;
        }
        current = val;
        (*cache)[cache_offset] = val;
        ++cache_offset;
        ++size_;

        assert(cache_offset > 0);
        assert(cache_offset <= block_type::size);
    }
    value_type & top()
    {
        assert(cache_offset > 0);
        assert(cache_offset <= block_type::size);
        return current;
    }
    const value_type & top() const
    {
        assert(cache_offset > 0);
        assert(cache_offset <= block_type::size);
        return current;
    }
    void pop()
    {
        STXXL_VERBOSE3("grow_shrink_stack2::pop()");
        assert(cache_offset > 0);
        assert(cache_offset <= block_type::size);

        if (cache_offset == 1 && (!bids.empty()))  // the cache block is about to run empty
        {
            STXXL_VERBOSE2("grow_shrink_stack2::pop() shrinking, size = " << size_);

            bid_type last_block = bids.back();
            bids.pop_back();

            // bring the topmost on-disk block back into the cache and free it on disk
            p_pool.read(cache, last_block)->wait();
            block_manager::get_instance()->delete_block(last_block);

            // hint the next blocks below the top to the prefetch pool
            const int_type bids_size = bids.size();
            const int_type last_pref = STXXL_MAX(int_type(bids_size) - int_type(pref_aggr), (int_type)0);
            for (int_type i = bids_size - 1; i >= last_pref; --i)
                p_pool.hint(bids[i]);

            cache_offset = block_type::size + 1;
        }

        --cache_offset;
        --size_;
        if (cache_offset > 0)
            current = (*cache)[cache_offset - 1];
    }
    //! \brief Sets the prefetch aggressiveness (number of blocks hinted beyond the cached one).
    void set_prefetch_aggr(unsigned_type new_p)
    {
        if (pref_aggr > new_p)
        {
            // aggressiveness shrinks: resolve prefetches that are no longer wanted
            const int_type bids_size = bids.size();
            const int_type last_pref = STXXL_MAX(int_type(bids_size) - int_type(pref_aggr), (int_type)0);
            for (int_type i = bids_size - new_p - 1; i >= last_pref; --i)
            {
                if (p_pool.in_prefetching(bids[i]))
                    p_pool.read(cache, bids[i])->wait();
            }
        }
        else if (pref_aggr < new_p)
        {
            // aggressiveness grows: hint additional blocks to the prefetch pool
            const int_type bids_size = bids.size();
            const int_type last_pref = STXXL_MAX(int_type(bids_size) - int_type(new_p), (int_type)0);
            for (int_type i = bids_size - 1; i >= last_pref; --i)
                p_pool.hint(bids[i]);
        }
        pref_aggr = new_p;
    }
};
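// Illustrative sketch (an assumption based on the constructor as reconstructed
// above, not verbatim from the original source): grow_shrink_stack2 does not own
// its buffers; it is handed a prefetch pool and a write pool, and the
// aggressiveness value says how many blocks beyond the cached one are kept in
// flight.  block_type is assumed to be the stack's public block typedef.
//
//     typedef stxxl::grow_shrink_stack2<int_cfg> gs2_stack;
//     stxxl::prefetch_pool<gs2_stack::block_type> p_pool(8);   // 8 prefetch buffers
//     stxxl::write_pool<gs2_stack::block_type>    w_pool(8);   // 8 write buffers
//     gs2_stack s(p_pool, w_pool, 4);
//     for (int i = 0; i < 10000000; ++i)
//         s.push(i);
//     s.set_prefetch_aggr(6);      // may be adjusted while the stack is in use
//     while (s.size() > 0)
//         s.pop();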
//! \brief A stack that transparently migrates from an internal to an external
//! implementation once it grows beyond a critical size.
template <unsigned_type CritSize, class ExternalStack, class InternalStack>
class migrating_stack
{
public:
    typedef typename ExternalStack::cfg cfg;
    typedef typename cfg::value_type value_type;
    typedef typename cfg::alloc_strategy alloc_strategy;
    typedef typename cfg::size_type size_type;
    enum {
        blocks_per_page = cfg::blocks_per_page,
        block_size = cfg::block_size
    };

    typedef InternalStack int_stack_type;
    typedef ExternalStack ext_stack_type;

    enum { critical_size = CritSize };

private:
    int_stack_type * int_impl;
    ext_stack_type * ext_impl;
public:
    template <class stack_type>
    migrating_stack(const stack_type & stack_);

    void swap(migrating_stack & obj)
    {
        std::swap(int_impl, obj.int_impl);
        std::swap(ext_impl, obj.ext_impl);
    }

    //! \brief Returns true while the elements still reside in internal memory.
    bool internal() const
    {
        assert((int_impl && !ext_impl) || (!int_impl && ext_impl));
        return int_impl != NULL;
    }
    bool external() const
    {
        assert((int_impl && !ext_impl) || (!int_impl && ext_impl));
        return ext_impl != NULL;
    }
    bool empty() const
    {
        assert((int_impl && !ext_impl) || (!int_impl && ext_impl));
        return (int_impl) ? int_impl->empty() : ext_impl->empty();
    }
    size_type size() const
    {
        assert((int_impl && !ext_impl) || (!int_impl && ext_impl));
        return (int_impl) ? int_impl->size() : ext_impl->size();
    }
    value_type & top()
    {
        assert((int_impl && !ext_impl) || (!int_impl && ext_impl));
        return (int_impl) ? int_impl->top() : ext_impl->top();
    }
    const value_type & top() const
    {
        assert((int_impl && !ext_impl) || (!int_impl && ext_impl));
        return (int_impl) ? int_impl->top() : ext_impl->top();
    }
    void push(const value_type & val)
    {
        assert((int_impl && !ext_impl) || (!int_impl && ext_impl));
        if (int_impl)
        {
            int_impl->push(val);
            // migrate to the external implementation once the critical size is reached
            if (int_impl->size() == critical_size)
            {
                ext_impl = new ext_stack_type(*int_impl);
                delete int_impl;
                int_impl = NULL;
            }
        }
        else
            ext_impl->push(val);
        assert((int_impl && !ext_impl) || (!int_impl && ext_impl));
    }
    void pop()
    {
        assert((int_impl && !ext_impl) || (!int_impl && ext_impl));
        if (int_impl)
            int_impl->pop();
        else
            ext_impl->pop();
    }
};
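// Illustrative sketch (not from the original source; assumes the usual default
// constructor that starts with an empty internal stack): a migrating stack stays
// a plain internal stack until critical_size elements have been pushed, so small
// stacks never touch the disk.
//
//     typedef stxxl::normal_stack<int_cfg> ext_stack;
//     typedef stxxl::migrating_stack<4096, ext_stack, std::stack<int> > mig_stack;
//     mig_stack s;
//     s.push(1);
//     assert(s.internal());        // still internal: far below the critical size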
enum stack_externality { external, migrating, internal };
enum stack_behaviour { normal, grow_shrink, grow_shrink2 };
//! \brief Stack type generator: selects one of the stack implementations above
//! according to the requested externality and behaviour.
template <class ValTp,
          stack_externality Externality = external,
          stack_behaviour Behaviour = normal,
          unsigned BlocksPerPage = 4,
          unsigned BlkSz = STXXL_DEFAULT_BLOCK_SIZE(ValTp),
          class IntStackTp = std::stack<ValTp>,
          unsigned_type MigrCritSize = (2 * BlocksPerPage * BlkSz),
          class AllocStr = STXXL_DEFAULT_ALLOC_STRATEGY,
          class SzTp = stxxl::int64>
class STACK_GENERATOR
{
    typedef stack_config_generator<ValTp, BlocksPerPage, BlkSz, AllocStr, SzTp> cfg;

    typedef typename IF<Behaviour == grow_shrink,
                        grow_shrink_stack<cfg>, grow_shrink_stack2<cfg> >::result GrShrTp;
    typedef typename IF<Behaviour == normal, normal_stack<cfg>, GrShrTp>::result ExtStackTp;
    typedef typename IF<Externality == migrating,
                        migrating_stack<MigrCritSize, ExtStackTp, IntStackTp>, ExtStackTp>::result MigrOrNotStackTp;

public:
    typedef typename IF<Externality == internal, IntStackTp, MigrOrNotStackTp>::result result;
};
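// Typical usage sketch (illustrative only): request a stack flavour through the
// generator instead of spelling out the implementation class and its config.
//
//     typedef stxxl::STACK_GENERATOR<double>::result default_stack;
//     typedef stxxl::STACK_GENERATOR<double, stxxl::external,
//                                    stxxl::grow_shrink>::result gs_stack;
//     default_stack s;
//     s.push(3.14);
//     s.pop();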
__STXXL_END_NAMESPACE

namespace std
{
    template <class Config_>
    void swap(stxxl::normal_stack<Config_> & a,
              stxxl::normal_stack<Config_> & b)
    {
        a.swap(b);
    }

    template <class Config_>
    void swap(stxxl::grow_shrink_stack<Config_> & a,
              stxxl::grow_shrink_stack<Config_> & b)
    {
        a.swap(b);
    }

    template <class Config_>
    void swap(stxxl::grow_shrink_stack2<Config_> & a,
              stxxl::grow_shrink_stack2<Config_> & b)
    {
        a.swap(b);
    }

    template <stxxl::unsigned_type CritSize, class ExternalStack, class InternalStack>
    void swap(stxxl::migrating_stack<CritSize, ExternalStack, InternalStack> & a,
              stxxl::migrating_stack<CritSize, ExternalStack, InternalStack> & b)
    {
        a.swap(b);
    }
}

#endif // !STXXL_STACK_HEADER