25 #ifndef _PASL_DATA_BOOTCHUNKEDSEQNEW_H_
26 #define _PASL_DATA_BOOTCHUNKEDSEQNEW_H_
31 namespace bootchunkedseq {
36 template <
class Item,
class Measured>
48 cached = other.cached;
59 template <
class Top_item_base,
60 int Chunk_capacity = 32,
70 Top_item_deleter, Top_item_copier, Chunk_struct, Size_access>;
101 measured_type m = algebra_type::identity();
102 for (
auto p = lo; p < hi; p++)
103 m = algebra_type::combine(m,
operator()(*p));
109 static void swap(measured_type& x, measured_type& y) {
115 using measured_type =
typename cache_type::measured_type;
116 using algebra_type =
typename cache_type::algebra_type;
117 using measure_type =
typename cache_type::measure_type;
119 using queue_type = Chunk_struct<cached_item_type, Chunk_capacity, std::allocator<cached_item_type> >;
121 using annotation_type =
typename Top_item_base::annotation_type;
124 using chunk_pointer = chunk_type*;
125 using const_chunk_pointer =
const chunk_type*;
127 measure_type meas_fct;
129 using top_item_type = Top_item_base*;
132 top_item_type top_item;
133 chunk_pointer rec_item;
142 measure_type meas_fct;
144 measured_type cached;
145 chunk_type front_outer, front_inner, back_inner, back_outer;
146 chunk_type& shallow_chunk ;
153 annotation_type annotation;
155 using self_type = layer;
157 layer() : shallow_chunk(front_outer) {
173 void incr_front(measured_type& cached,
const measured_type& m) {
174 cached = algebra_type::combine(m, cached);
177 void incr_back(measured_type& cached,
const measured_type& m) {
178 cached = algebra_type::combine(cached, m);
181 void decr_front(measured_type& cached,
const measured_type& m) {
182 incr_front(cached, algebra_type::inverse(m));
185 void decr_back(measured_type& cached,
const measured_type& m) {
186 incr_back(cached, algebra_type::inverse(m));
// Deep-copies layer `other` into this layer, recursing through the middle
// sequence. `depth` counts how many bootstrap levels deep this layer sits,
// so that chunk_deep_copy can tell top items apart from nested chunks.
// NOTE(review): this listing is missing original lines 190-192 and 199-200,
// so the exact branch structure around the `other.middle == NULL` test
// (the shallow-copy path) cannot be confirmed from here.
189 void rec_copy(
int depth,
const self_type& other) {
// Copy the cached measure wholesale, then deep-copy all four chunk buffers.
193 cached = other.cached;
194 chunk_deep_copy(depth, other.front_outer, front_outer);
195 chunk_deep_copy(depth, other.front_inner, front_inner);
196 chunk_deep_copy(depth, other.back_inner, back_inner);
197 chunk_deep_copy(depth, other.back_outer, back_outer);
198 if (other.middle == NULL) {
// Deep source: allocate a fresh middle layer and copy it one level deeper.
201 middle =
new layer();
202 middle->rec_copy(depth+1, *(other.middle));
206 void swap(self_type& other) {
207 std::swap(cached, other.cached);
208 std::swap(middle, other.middle);
209 front_outer.swap(other.front_outer);
210 front_inner.swap(other.front_inner);
211 back_inner.swap(other.back_inner);
212 back_outer.swap(other.back_outer);
215 void convert_deep_to_shallow() {
220 inline bool is_shallow()
const {
221 return (middle == NULL);
224 void reset_cached() {
227 cached = algebra_type::identity();
228 incr_back(cached, front_outer.get_cached());
229 incr_back(cached, front_inner.get_cached());
230 incr_back(cached, middle->get_cached());
231 incr_back(cached, back_inner.get_cached());
232 incr_back(cached, back_outer.get_cached());
238 inline void push_buffer_front_force(chunk_type& c) {
239 chunk_pointer d = chunk_alloc();
241 middle->push_front(cached_item_of_chunk_pointer(d));
245 inline void push_buffer_back_force(chunk_type& c) {
246 chunk_pointer d = chunk_alloc();
248 middle->push_back(cached_item_of_chunk_pointer(d));
251 inline bool empty()
const {
253 return shallow_chunk.empty();
255 return front_outer.empty() && back_outer.empty();
260 return shallow_chunk.get_cached();
267 if (! shallow_chunk.full()) {
268 shallow_chunk.push_front(meas_fct, x);
271 middle =
new layer();
272 cached = shallow_chunk.get_cached();
273 shallow_chunk.swap(back_outer);
275 incr_front(cached, x.get_cached());
276 front_outer.push_front(meas_fct, x);
279 if (front_outer.full()) {
280 if (front_inner.full())
281 push_buffer_front_force(front_inner);
282 front_outer.swap(front_inner);
283 assert(front_outer.empty());
285 incr_front(cached, x.get_cached());
286 front_outer.push_front(meas_fct, x);
292 if (! shallow_chunk.full()) {
293 shallow_chunk.push_back(meas_fct, x);
296 middle =
new layer();
297 cached = shallow_chunk.get_cached();
299 incr_back(cached, x.get_cached());
300 back_outer.push_back(meas_fct, x);
303 if (back_outer.full()) {
304 if (back_inner.full())
305 push_buffer_back_force(back_inner);
306 back_outer.swap(back_inner);
307 assert(back_outer.empty());
309 incr_back(cached, x.get_cached());
310 back_outer.push_back(meas_fct, x);
314 cached_item_type&
front()
const {
316 return shallow_chunk.front();
318 assert(! front_outer.empty() || front_inner.empty());
319 if (! front_outer.empty()) {
320 return front_outer.front();
321 }
else if (! middle->empty()) {
322 chunk_pointer c = chunk_pointer_of_cached_item(middle->front());
324 }
else if (! back_inner.empty()) {
325 return back_inner.front();
327 assert(! back_outer.empty());
328 return back_outer.front();
333 cached_item_type&
back()
const {
335 return shallow_chunk.back();
337 assert(! back_outer.empty() || back_inner.empty());
338 if (! back_outer.empty()) {
339 return back_outer.back();
340 }
else if (! middle->empty()) {
341 chunk_pointer c = chunk_pointer_of_cached_item(middle->back());
343 }
else if (! front_inner.empty()) {
344 return front_inner.back();
346 assert(! front_outer.empty());
347 return front_outer.back();
// Refill the front-outer buffer after a pop left it empty, pulling from the
// nearest non-empty source in order: front_inner, then a chunk popped off the
// middle layer, then back_inner, then back_outer.
// NOTE(review): original line 368 is missing from this listing, so how the
// final else-if chains into convert_deep_to_shallow() (presumably the
// everything-empty case) cannot be confirmed from here.
356 void try_populate_front_outer() {
357 if (front_outer.empty()) {
358 if (! front_inner.empty()) {
359 front_inner.swap(front_outer);
360 }
else if (! middle->empty()) {
// Steal a whole chunk from the middle layer; free its (now empty) shell.
361 chunk_pointer c = chunk_pointer_of_cached_item(middle->pop_front());
362 front_outer.swap(*c);
363 chunk_free_when_empty(c);
364 }
else if (! back_inner.empty()) {
365 back_inner.swap(front_outer);
366 }
else if (! back_outer.empty()) {
367 back_outer.swap(front_outer);
369 convert_deep_to_shallow();
// Mirror of try_populate_front_outer: refill the back-outer buffer from the
// nearest non-empty source in order: back_inner, then a chunk popped off the
// back of the middle layer, then front_inner, then front_outer.
// NOTE(review): original line 380 is missing from this listing — presumably
// `back_outer.swap(*c);` between the pop_back and the free, by symmetry with
// the front variant — confirm against the original file.
374 void try_populate_back_outer() {
375 if (back_outer.empty()) {
376 if (! back_inner.empty()) {
377 back_inner.swap(back_outer);
378 }
else if (! middle->empty()) {
379 chunk_pointer c = chunk_pointer_of_cached_item(middle->pop_back());
381 chunk_free_when_empty(c);
382 }
else if (! front_inner.empty()) {
383 front_inner.swap(back_outer);
384 }
else if (! front_outer.empty()) {
385 front_outer.swap(back_outer);
387 convert_deep_to_shallow();
394 return shallow_chunk.pop_front(meas_fct);
396 if (front_outer.empty()) {
397 assert(front_inner.empty());
398 if (! middle->empty()) {
399 chunk_pointer c = chunk_pointer_of_cached_item(middle->pop_front());
400 front_outer.swap(*c);
401 chunk_free_when_empty(c);
402 }
else if (! back_inner.empty()) {
403 back_inner.swap(front_outer);
404 }
else if (! back_outer.empty()) {
405 back_outer.swap(front_outer);
408 assert(! front_outer.empty());
409 cached_item_type x = front_outer.pop_front(meas_fct);
410 if (algebra_type::has_inverse)
411 decr_front(cached, x.get_cached());
414 try_populate_front_outer();
421 return shallow_chunk.pop_back(meas_fct);
423 if (back_outer.empty()) {
424 assert(back_inner.empty());
425 if (! middle->empty()) {
426 chunk_pointer c = chunk_pointer_of_cached_item(middle->pop_back());
428 chunk_free_when_empty(c);
429 }
else if (! front_inner.empty()) {
430 front_inner.swap(back_outer);
431 }
else if (! front_outer.empty()) {
432 front_outer.swap(back_outer);
435 assert(! back_outer.empty());
436 cached_item_type x = back_outer.pop_back(meas_fct);
437 if (algebra_type::has_inverse)
438 decr_back(cached, x.get_cached());
441 try_populate_back_outer();
// Restore the layer invariant after a bulk operation (split/concat): if both
// outer buffers emptied out, either collapse to shallow representation (when
// the middle is empty too) or promote a chunk from the middle into front_outer.
// NOTE(review): original lines 449-450, 454-455 and 459-462 are missing from
// this listing, so the exact brace/else structure is not fully visible.
448 void restore_both_outer_empty_middle_empty() {
451 if (front_outer.empty() && back_outer.empty()) {
452 if (middle->empty()) {
453 convert_deep_to_shallow();
// Middle non-empty: pull its front chunk up into front_outer.
456 chunk_pointer c = chunk_pointer_of_cached_item(middle->pop_front());
457 front_outer.swap(*c);
458 chunk_free_when_empty(c);
// Flush both inner buffers into the middle layer, leaving them empty;
// used before operations (e.g. split) that assume empty inner buffers.
463 void ensure_empty_inner() {
464 if (! front_inner.empty())
465 push_buffer_front_force(front_inner);
466 if (! back_inner.empty())
467 push_buffer_back_force(back_inner);
479 template <
class Pred>
480 measured_type search_in_layer(
const Pred& p, measured_type prefix,
position_type& pos)
const {
481 measured_type cur = prefix;
483 if (! front_outer.empty()) {
485 cur = algebra_type::combine(cur, front_outer.get_cached());
487 pos = found_front_outer;
499 if (! front_inner.empty()) {
501 cur = algebra_type::combine(cur, front_inner.get_cached());
503 pos = found_front_inner;
507 if (! middle->empty()) {
509 cur = algebra_type::combine(prefix, middle->get_cached());
515 if (! back_inner.empty()) {
517 cur = algebra_type::combine(cur, back_inner.get_cached());
519 pos = found_back_inner;
523 if (! back_outer.empty()) {
525 cur = algebra_type::combine(cur, back_outer.get_cached());
527 pos = found_back_outer;
540 template <
class Node,
class Po
inter>
541 static void cache_search_data_for_backtracking(
const Node& nd,
545 const measured_type prefix) {
546 nd.annotation.parent.set_pointer(ptr, tag);
547 assert(!annotation_type::finger_search_enabled ||
548 nd.annotation.parent.template get_tag() == tag);
549 nd.annotation.parent.set_depth(depth);
550 nd.annotation.parent.template set_prefix<measured_type>(prefix);
553 template <
class Pred>
554 static measured_type chunk_search(
const measure_type& meas_fct,
558 const measured_type prefix,
559 const Top_item_base*& r) {
560 chunk_search_type search;
561 chunk_search_result_type s = search(c, meas_fct, prefix, p);
562 measured_type prefix_res = s.prefix;
564 cached_item_type& t = c[i];
567 r = top_item_of_cached_item(t);
568 cache_search_data_for_backtracking(*r, &c, tag, depth, prefix);
570 const_chunk_pointer tc = const_chunk_pointer_of_cached_item(t);
571 cache_search_data_for_backtracking(*tc, &c, tag, depth, prefix);
572 prefix_res = chunk_search(meas_fct, *tc, depth-1, p, prefix_res, r);
577 template <
class Pred>
578 measured_type rec_search(
const measure_type& meas_fct,
const int depth,
579 const Pred& p,
const measured_type prefix,
580 const Top_item_base*& r)
const {
582 measured_type prefix_res = search_in_layer(p, prefix, pos);
585 case found_front_outer: {
586 cache_search_data_for_backtracking(front_outer,
this, tag, depth, prefix);
587 prefix_res = chunk_search(meas_fct, front_outer, depth, p, prefix_res, r);
590 case found_front_inner: {
591 cache_search_data_for_backtracking(front_inner,
this, tag, depth, prefix);
592 prefix_res = chunk_search(meas_fct, front_inner, depth, p, prefix_res, r);
596 cache_search_data_for_backtracking(*middle,
this, tag, depth, prefix);
597 prefix_res = middle->rec_search(meas_fct, depth+1, p, prefix_res, r);
600 case found_back_inner: {
601 cache_search_data_for_backtracking(back_inner,
this, tag, depth, prefix);
602 prefix_res = chunk_search(meas_fct, back_inner, depth, p, prefix_res, r);
605 case found_back_outer: {
606 cache_search_data_for_backtracking(back_outer,
this, tag, depth, prefix);
607 prefix_res = chunk_search(meas_fct, back_outer, depth, p, prefix_res, r);
610 case found_nowhere: {
618 template <
class Pred>
619 measured_type backtrack_search(
const Pred& p,
const measured_type prefix,
620 const Top_item_base*& r)
const {
621 using tree_pointer_type =
union {
622 const self_type* layer;
623 const_chunk_pointer interior_node;
624 const Top_item_base* chunk;
627 tree_pointer_type ptr_cur,
629 tree_pointer_type& ptr_next,
631 measured_type& prefix_next) {
633 case annotation::PARENT_POINTER_BOOTSTRAP_INTERIOR_NODE: {
634 const_chunk_pointer nd = ptr_cur.interior_node;
635 tag_next = nd->annotation.parent.get_tag();
636 ptr_next.interior_node = nd->annotation.parent.template get_pointer<const_chunk_pointer>();
637 depth_next = nd->annotation.parent.get_depth();
638 prefix_next = nd->annotation.parent.template get_prefix<measured_type>();
641 case annotation::PARENT_POINTER_BOOTSTRAP_LAYER_NODE: {
642 const self_type* layer = ptr_cur.
layer;
643 tag_next = layer->annotation.parent.get_tag();
644 ptr_next.layer = layer->annotation.parent.template get_pointer<const self_type*>();
645 depth_next = layer->annotation.parent.get_depth();
646 prefix_next = layer->annotation.parent.template get_prefix<measured_type>();
649 case annotation::PARENT_POINTER_CHUNK: {
650 const Top_item_base* chunk = ptr_cur.chunk;
651 tag_next = chunk->annotation.parent.get_tag();
652 ptr_next.interior_node = chunk->annotation.parent.template get_pointer<const_chunk_pointer>();
653 depth_next = chunk->annotation.parent.get_depth();
654 prefix_next = chunk->annotation.parent.template get_prefix<measured_type>();
657 case annotation::PARENT_POINTER_UNINITIALIZED:
660 tag_next = annotation::PARENT_POINTER_UNINITIALIZED;
661 ptr_next.interior_node =
nullptr;
663 prefix_next = algebra_type::identity();
668 assert(r !=
nullptr);
670 measured_type prefix_cur = r->annotation.prefix.template get_cached<measured_type>();
672 tree_pointer_type ptr_cur;
674 int depth_cur = r->annotation.parent.get_depth() - 1;
676 auto finished_backtracking = [&] (measured_type m) {
677 measured_type n = algebra_type::combine(prefix_cur, m);
678 return !p(prefix_cur) && p(n);
682 case annotation::PARENT_POINTER_BOOTSTRAP_INTERIOR_NODE: {
683 const_chunk_pointer nd = ptr_cur.interior_node;
684 if (finished_backtracking(nd->get_cached())) {
685 return chunk_search(meas_fct, *nd, depth_cur, p, prefix_cur, r);
689 case annotation::PARENT_POINTER_BOOTSTRAP_LAYER_NODE: {
690 const self_type* layer = ptr_cur.layer;
691 if (finished_backtracking(layer->get_cached())) {
692 return layer->rec_search(meas_fct, depth_cur, p, prefix_cur, r);
696 case annotation::PARENT_POINTER_CHUNK: {
697 const Top_item_base* chunk = ptr_cur.chunk;
699 if (finished_backtracking(top_meas_fct(chunk))) {
705 case annotation::PARENT_POINTER_UNINITIALIZED: {
706 return rec_search(meas_fct, depth0, p, prefix, r);
714 get_parent_linkage(tag_cur, ptr_cur, tag_cur, ptr_cur, depth_cur, prefix_cur);
721 template <
class Pred>
722 measured_type search(
const Pred& p, measured_type prefix,
const Top_item_base*& r)
const {
724 if (annotation_type::finger_search_enabled && r !=
nullptr)
725 return backtrack_search(p, prefix, r);
727 return rec_search(meas_fct, depth0, p, prefix, r);
731 template <
class Pred>
732 measured_type
split(
const Pred& p, measured_type prefix, cached_item_type& x, self_type& other) {
733 assert(other.empty() && other.is_shallow());
739 prefix = chunk_split(p, prefix, shallow_chunk, x, other.shallow_chunk);
741 other.middle =
new layer();
743 ensure_empty_inner();
745 prefix = search_in_layer(p, prefix, pos);
747 case found_front_outer: {
748 prefix = chunk_split(p, prefix, front_outer, x, other.front_outer);
749 std::swap(middle, other.middle);
750 back_outer.swap(other.back_outer);
753 case found_front_inner: {
758 back_outer.swap(other.back_outer);
760 prefix = middle->split(p, prefix, y, *(other.middle));
762 chunk_pointer c = chunk_pointer_of_cached_item(y);
764 chunk_free_when_empty(c);
765 prefix = chunk_split(p, prefix, back_outer, x, other.front_outer);
768 case found_back_inner: {
772 case found_back_outer: {
773 prefix = chunk_split(p, prefix, back_outer, x, other.back_outer);
776 case found_nowhere: {
783 other.reset_cached();
785 restore_both_outer_empty_middle_empty();
786 other.restore_both_outer_empty_middle_empty();
// Push the contents of buffer `c` onto the back of the middle layer, fusing
// with the middle's last chunk when the combined size fits in Chunk_capacity
// (to keep middle chunks densely filled).
// NOTE(review): original lines 796-797 (presumably the empty-`c` early-out)
// and 805-806 (presumably `} else {` plus a `middle->pop_back()` that removes
// `b` before refilling it) are missing from this listing — confirm against
// the original file.
794 void push_buffer_back(chunk_type& c) {
795 size_t csize = c.size();
798 }
else if (middle->empty()) {
799 push_buffer_back_force(c);
// Non-empty middle: inspect its last chunk to decide fuse vs. force-push.
801 chunk_pointer b = chunk_pointer_of_cached_item(middle->back());
802 size_t bsize = b->size();
803 if (bsize + csize > Chunk_capacity) {
804 push_buffer_back_force(c);
// Fits: move all of c's items to the back of b, re-insert b.
807 c.transfer_from_front_to_back(meas_fct, *b, csize);
808 middle->push_back(cached_item_of_chunk_pointer(b));
// Mirror of push_buffer_back: push buffer `c` onto the *front* of the middle
// layer, fusing with the middle's first chunk when the combined size fits in
// Chunk_capacity.
// NOTE(review): original lines 816-817 (presumably the empty-`c` early-out)
// and 825-826 (presumably `} else {` plus a `middle->pop_front()` removing
// `b` before refilling it) are missing from this listing — confirm against
// the original file.
814 void push_buffer_front(chunk_type& c) {
815 size_t csize = c.size();
818 }
else if (middle->empty()) {
819 push_buffer_front_force(c);
// Non-empty middle: inspect its first chunk to decide fuse vs. force-push.
821 chunk_pointer b = chunk_pointer_of_cached_item(middle->front());
822 size_t bsize = b->size();
823 if (bsize + csize > Chunk_capacity) {
824 push_buffer_front_force(c);
// Fits: move all of c's items to the front of b, re-insert b.
827 c.transfer_from_back_to_front(meas_fct, *b, csize);
828 middle->push_front(cached_item_of_chunk_pointer(b));
834 void concat(self_type& other) {
835 if (other.is_shallow()) {
836 size_type nb = other.shallow_chunk.size();
837 for (
int i = 0; i < nb; i++) {
838 cached_item_type x = other.shallow_chunk.pop_front(meas_fct);
841 }
else if (is_shallow()) {
843 size_type nb = other.shallow_chunk.size();
844 for (
int i = 0; i < nb; i++) {
845 cached_item_type x = other.shallow_chunk.pop_back(meas_fct);
850 push_buffer_back(back_inner);
851 push_buffer_back(back_outer);
852 other.push_buffer_front(other.front_inner);
853 other.push_buffer_front(other.front_outer);
855 if (! middle->empty() && ! other.middle->empty()) {
856 chunk_pointer c1 = chunk_pointer_of_cached_item(middle->back());
857 chunk_pointer c2 = chunk_pointer_of_cached_item(other.middle->front());
860 if (nb1 + nb2 <= Chunk_capacity) {
862 other.middle->pop_front();
863 c2->transfer_from_front_to_back(meas_fct, *c1, nb2);
864 chunk_free_when_empty(c2);
865 middle->push_back(cached_item_of_chunk_pointer(c1));
869 back_inner.swap(other.back_inner);
870 back_outer.swap(other.back_outer);
872 middle->concat(*(other.middle));
874 cached = algebra_type::combine(cached, other.cached);
876 restore_both_outer_empty_middle_empty();
878 other.convert_deep_to_shallow();
879 assert(other.empty());
// Debug-only structural check (compiled only under BOOTCHUNKEDSEQ_CHECK):
// recomputes the measure of each buffer, combines them in sequence order,
// and asserts the layer's buffer-occupancy invariants:
//   - an inner buffer may be non-empty only if its outer buffer is non-empty;
//   - inner buffers are either empty or exactly full (Chunk_capacity);
//   - a deep layer always has at least one item in an outer buffer.
// Recurses into the middle layer with depth+1.
// NOTE(review): original lines 904 and 906+ are missing from this listing,
// so how `w` is checked against the stored cache is not visible here.
883 void rec_check(
int depth)
const {
884 #ifdef BOOTCHUNKEDSEQ_CHECK
885 if (! is_shallow()) {
// Recompute per-buffer measures bottom-up.
886 measured_type wfo = chunk_check_weight(depth, front_outer);
887 measured_type wfi = chunk_check_weight(depth, front_inner);
888 measured_type wbi = chunk_check_weight(depth, back_inner);
889 measured_type wbo = chunk_check_weight(depth, back_outer);
890 measured_type wmid = middle->get_cached();
// Combine in sequence order: front_outer, front_inner, middle, back_inner, back_outer.
891 measured_type w = algebra_type::combine(wfo, wfi);
892 w = algebra_type::combine(w, wmid);
893 w = algebra_type::combine(w, wbi);
894 w = algebra_type::combine(w, wbo);
895 size_t sfo = front_outer.size();
896 size_t sfi = front_inner.size();
897 size_t sbi = back_inner.size();
898 size_t sbo = back_outer.size();
// Occupancy invariants (see header comment above).
899 assert (sfo != 0 || sfi == 0);
900 assert (sbo != 0 || sbi == 0);
901 assert (sfi == 0 || sfi == Chunk_capacity);
902 assert (sbi == 0 || sbi == Chunk_capacity);
903 assert (sfo + sbo > 0);
905 middle->rec_check(depth+1);
910 void rec_print(
int depth) {
913 rec_print_chunk(depth, shallow_chunk);
920 rec_print_chunk(depth, front_outer);
922 rec_print_chunk(depth, front_inner);
924 rec_print_chunk(depth, back_inner);
926 rec_print_chunk(depth, back_outer);
928 middle->rec_print(depth+1);
932 template <
class Add_edge,
class Process_chunk>
933 void rec_reveal_internal_structure(
const Add_edge& add_edge,
934 const Process_chunk& process_chunk,
937 add_edge(
this, &shallow_chunk);
938 rec_reveal_internal_structure_of_chunk(add_edge, process_chunk, depth, shallow_chunk);
940 add_edge((
void*)
this, (
void*)&front_outer);
941 add_edge((
void*)
this, (
void*)&front_inner);
942 add_edge((
void*)
this, (
void*)middle);
943 add_edge((
void*)
this, (
void*)&back_inner);
944 add_edge((
void*)
this, (
void*)&back_outer);
945 rec_reveal_internal_structure_of_chunk(add_edge, process_chunk, depth, front_outer);
946 rec_reveal_internal_structure_of_chunk(add_edge, process_chunk, depth, front_inner);
947 middle->rec_reveal_internal_structure(add_edge, process_chunk, depth+1);
948 rec_reveal_internal_structure_of_chunk(add_edge, process_chunk, depth, back_inner);
949 rec_reveal_internal_structure_of_chunk(add_edge, process_chunk, depth, back_outer);
953 template <
class Body>
954 void rec_for_each(
int depth,
const Body& f)
const {
956 chunk_for_each(depth, f, shallow_chunk);
958 chunk_for_each(depth, f, front_outer);
959 chunk_for_each(depth, f, front_inner);
960 middle->rec_for_each(depth+1, f);
961 chunk_for_each(depth, f, back_inner);
962 chunk_for_each(depth, f, back_outer);
// --- Conversions between cached items and their two payload kinds ---
// A cached_item_type wraps either a top-level item pointer (top_item) or a
// pointer to a nested chunk (rec_item), paired with its cached measure.
// NOTE(review): this listing is missing the lines that build `x` (original
// 974-975, 982-983) and the return/closing lines of several helpers, so the
// exact construction of the item union cannot be confirmed from here.
973 static inline cached_item_type cached_item_of_chunk_pointer(chunk_pointer c) {
976 measured_type w = c->get_cached();
977 cached_item_type v(x, w);
981 static inline cached_item_type cached_item_of_top_item(top_item_type y, measured_type w) {
984 cached_item_type v(x, w);
// Extract the nested-chunk pointer stored in a cached item.
988 static inline chunk_pointer chunk_pointer_of_cached_item(
const cached_item_type v) {
989 return v.get_item().rec_item;
// Const-pointer variant of the above.
992 static inline const_chunk_pointer const_chunk_pointer_of_cached_item(
const cached_item_type v) {
993 return v.get_item().rec_item;
// Extract the top-level item pointer stored in a cached item.
996 static inline top_item_type top_item_of_cached_item(
const cached_item_type v) {
997 return v.get_item().top_item;
// Allocate a fresh, empty chunk on the heap.
1003 static inline chunk_pointer chunk_alloc() {
1004 return new chunk_type();
// Free a chunk shell; body not visible in this listing (original 1008+).
1007 static inline void chunk_free_when_empty(chunk_pointer c) {
1014 static inline void chunk_deep_free(
int depth, chunk_type& c) {
1015 c.for_each([depth] (cached_item_type& v) {
1017 top_item_type x = top_item_of_cached_item(v);
1018 Top_item_deleter::dealloc(x);
1020 chunk_pointer d = chunk_pointer_of_cached_item(v);
1021 chunk_deep_free(depth-1, *d);
1027 static inline void chunk_deep_free_and_destruct(
int depth, chunk_type& c) {
1028 chunk_deep_free(depth, c);
1034 static void chunk_deep_copy(
int depth,
const chunk_type& src, chunk_type& dst) {
1035 src.for_each([&] (
const cached_item_type& v) {
1038 top_item_type orig = top_item_of_cached_item(v);
1040 c = cached_item_of_top_item(copy, v.get_cached());
1042 const_chunk_pointer orig = const_chunk_pointer_of_cached_item(v);
1043 chunk_pointer copy =
new chunk_type();
1044 chunk_deep_copy(depth-1, *orig, *copy);
1045 c = cached_item_of_chunk_pointer(copy);
1047 measure_type meas_fct;
1048 dst.push_back(meas_fct, c);
1053 template <
class Body>
1054 static void chunk_for_each(
int depth,
const Body& f,
const chunk_type& c) {
1055 c.for_each([&] (
const cached_item_type& v) {
1057 top_item_type item = top_item_of_cached_item(v);
1060 const_chunk_pointer c = const_chunk_pointer_of_cached_item(v);
1061 chunk_for_each(depth-1, f, *c);
1066 static inline measured_type chunk_check_weight(
int depth,
const chunk_type& c) {
1067 measured_type wref = c.get_cached();
1068 measured_type w = algebra_type::identity();
1069 c.for_each([&w, depth] (cached_item_type& v) {
1071 w = algebra_type::combine(w, v.get_cached());
1073 measured_type wiref = v.get_cached();
1074 chunk_pointer d = chunk_pointer_of_cached_item(v);
1075 measured_type wi = chunk_check_weight(depth-1, *d);
1079 w = algebra_type::combine(w, wiref);
1088 static void rec_print_item(
int depth, cached_item_type& x) {
1090 top_item_type y = top_item_of_cached_item(x);
1091 Top_item_base v = *y;
1092 std::cout << v.value;
1094 chunk_pointer d = chunk_pointer_of_cached_item(x);
1095 rec_print_chunk(depth-1, *d);
1099 template <
class Add_edge,
class Process_chunk>
1100 static void rec_reveal_internal_structure_of_chunk(
const Add_edge& add_edge,
1101 const Process_chunk& process_chunk,
1103 const chunk_type& c) {
1104 c.for_each([&] (
const cached_item_type& v) {
1106 top_item_type item = top_item_of_cached_item(v);
1107 add_edge((
void*)&c, (
void*)item);
1108 process_chunk(item);
1110 const_chunk_pointer d = const_chunk_pointer_of_cached_item(v);
1111 add_edge((
void*)&c, (
void*)d);
1112 rec_reveal_internal_structure_of_chunk(add_edge, process_chunk, depth-1, *d);
1117 static void rec_print_chunk(
int depth,
const chunk_type& c) {
1119 std::cout << c.get_cached();
1121 c.for_each([&] (cached_item_type& x) {
1122 rec_print_item(depth, x);
1139 template <
class Pred>
1140 static measured_type chunk_split(
const Pred& p,
1141 measured_type prefix,
1143 cached_item_type& x,
1145 measure_type meas_fct;
1146 return src.split(meas_fct, p, prefix, x, dst);
1151 using chunk_search_type = itemsearch::search_in_chunk<chunk_type, algebra_type>;
1152 using chunk_search_result_type =
typename chunk_search_type::result_type;
1159 template <
class Pred>
1160 static chunk_search_result_type chunk_search(
const Pred& p,
1161 measured_type prefix,
1163 chunk_search_type search;
1164 measure_type meas_fct;
1165 return search(c, meas_fct, prefix, p);
1172 static constexpr
int depth0 = 0;
1183 top_layer.rec_copy(depth0, other.top_layer);
1187 top_layer.swap(other.top_layer);
1191 return top_layer.empty();
1195 return top_layer.get_cached();
1200 top_layer.push_front(v);
1206 top_layer.push_back(v);
1212 return top_item_of_cached_item(v);
1217 return top_item_of_cached_item(v);
1228 return top_item_of_cached_item(v);
1234 return top_item_of_cached_item(v);
1241 top_layer.concat(other.top_layer);
1246 template <
class Pred>
1248 const Top_item_base*& c)
const {
1249 prefix = top_layer.search(p, prefix, c);
1253 template <
class Pred>
1256 measured_type prefix,
1260 prefix = top_layer.split(p, prefix, v, other.top_layer);
1261 x = top_item_of_cached_item(v);
1268 template <
class Pred>
1271 measured_type prefix,
1274 prefix =
split(meas, p, prefix, v, other);
1281 template <
class Body>
1283 top_layer.rec_for_each(0, f);
1292 top_layer.push_front(v);
1297 top_layer.push_back(v);
1343 assert(n <= size_orig);
1348 if (n == size_orig) {
1351 auto p = [n] (measured_type m) {
1356 prefix =
split(top_meas, p, prefix, other);
1363 assert(size_other + size_cur == size_orig);
1364 assert(size_cur == n);
1365 assert(size_other + n == size_orig);
1369 #ifdef BOOTCHUNKEDSEQ_CHECK
1370 top_layer.rec_check(depth0);
1376 template <
class ItemPr
inter>
1378 top_layer.rec_print(depth0);
1381 template <
class Add_edge,
class Process_chunk>
1383 const Process_chunk& process_chunk)
const {
1384 add_edge((
void*)
this, (
void*)&top_layer);
1385 top_layer.rec_reveal_internal_structure(add_edge, process_chunk, depth0);
cdeque(const self_type &other)
void push_back(const top_measure_type &top_meas, const value_type &x)
enum{PARENT_POINTER_BOOTSTRAP_INTERIOR_NODE=0, PARENT_POINTER_BOOTSTRAP_LAYER_NODE=1, PARENT_POINTER_CHUNK=2, PARENT_POINTER_UNINITIALIZED=3} parent_pointer_tag
void reveal_internal_structure(const Add_edge &add_edge, const Process_chunk &process_chunk) const
base::ringbuffer_ptr< base::heap_allocator< Item, Capacity+1 >> ringbuffer_ptr
void concat(const top_measure_type &, self_type &other)
measured_type split(const top_measure_type &meas, const Pred &p, measured_type prefix, self_type &other)
measured_type search_for_chunk(const Pred &p, measured_type prefix, const Top_item_base *&c) const
value_type pop_front(const top_measure_type &)
Defines an interface for attaching data to nodes in the underlying tree structure of the chunked sequence.
measured_type split(const top_measure_type &, const Pred &p, measured_type prefix, value_type &x, self_type &other)
sized_cache_measure top_cache_type
Cached_item(const Cached_item &other)
void copy(typename Alloc::pointer destination, typename Alloc::const_pointer source, typename Alloc::size_type num)
Polymorphic array copy.
measure_type get_measure() const
void concat(self_type &other)
typename top_cache_type::measure_type top_measure_type
Definitions of a few cached-measurement policies.
value_type pop_back(const top_measure_type &)
Measured get_cached() const
void push_front(const value_type &x, measured_type w)
Routines for finding an item in a container via binary search.
void for_each(const Body &f) const
measured_type get_cached() const
Cached_item(Item &item, Measured cached)
measured_type operator()(const cached_item_type &x) const
void split(size_type n, self_type &other)
measured_type operator()(const cached_item_type *lo, const cached_item_type *hi) const
void swap(self_type &other)
void push_back(const value_type &x, measured_type w)
Representation of a chunk.
void push_front(const top_measure_type &top_meas, const value_type &x)
enum{begin, end} position_type
typename top_cache_type::measured_type top_measured_type
typename top_cache_type::algebra_type top_algebra_type