chunkedseq
container library for large in-memory data sets

bootchunkedseq.hpp
#include <memory>
#include <assert.h>
#include <vector>
#include <iostream>

#include "atomic.hpp"
#include "chunk.hpp"
#include "fixedcapacity.hpp"
#include "cachedmeasure.hpp"
#include "itemsearch.hpp"
#include "annotation.hpp"

#ifndef _PASL_DATA_BOOTCHUNKEDSEQNEW_H_
#define _PASL_DATA_BOOTCHUNKEDSEQNEW_H_

namespace pasl {
namespace data {
namespace chunkedseq {
namespace bootchunkedseq {
/***********************************************************************/
// Pair of an item and a cached measure

template <class Item, class Measured>
class Cached_item {

private:

  Item item;
  Measured cached;

public:

  Cached_item() { }

  Cached_item(Item& item, Measured cached) : item(item), cached(cached) { }

  Cached_item(const Cached_item& other) {
    item = other.item;
    cached = other.cached;
  }

  Measured get_cached() const { return cached; }

  Item get_item() const { return item; }
};

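// Illustrative sketch (not part of the original source): with Item = int and
// Measured = size_t, a Cached_item pairs a value with its measure so the
// measure can be read back without re-applying the measure function:
//
//   int n = 42;
//   Cached_item<int, size_t> ci(n, 1); // e.g., a weight-one measure
//   assert(ci.get_cached() == 1);
//   assert(ci.get_item() == 42);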

/***********************************************************************/

template <class Top_item_base, // use "foo" to obtain a sequence of "foo*" items
          int Chunk_capacity = 32,
          class Cached_measure = cachedmeasure::trivial<Top_item_base*, size_t>,
          class Top_item_deleter = Pointer_deleter, // provides: static void dealloc(foo* x)
          class Top_item_copier = Pointer_deep_copier, // provides: static foo* copy(foo* x)
          template <class Item, int Capacity, class Item_alloc> class Chunk_struct = fixedcapacity::heap_allocated::ringbuffer_ptr,
          class Size_access = itemsearch::no_size_access
          >
class cdeque {

  using self_type = cdeque<Top_item_base, Chunk_capacity, Cached_measure,
                           Top_item_deleter, Top_item_copier, Chunk_struct, Size_access>;

public:

  using size_type = size_t; // TODO: from template argument?

  using top_cache_type = Cached_measure;
  using top_measured_type = typename top_cache_type::measured_type;
  using top_algebra_type = typename top_cache_type::algebra_type;
  using top_measure_type = typename top_cache_type::measure_type;

private:

  union item_type; // Forward declaration.

  using cached_item_type = Cached_item<item_type, top_measured_type>;

  class cache_type {
  public:

    using size_type = size_t;
    using measured_type = top_measured_type;
    using algebra_type = top_algebra_type;

    class measure_type {
    public:

      measured_type operator()(const cached_item_type& x) const {
        return x.get_cached();
      }

      measured_type operator()(const cached_item_type* lo, const cached_item_type* hi) const {
        measured_type m = algebra_type::identity();
        for (auto p = lo; p < hi; p++)
          m = algebra_type::combine(m, operator()(*p));
        return m;
      }

    };

    static void swap(measured_type& x, measured_type& y) {
      std::swap(x, y);
    }

  };

  using measured_type = typename cache_type::measured_type;
  using algebra_type = typename cache_type::algebra_type;
  using measure_type = typename cache_type::measure_type;

  using queue_type = Chunk_struct<cached_item_type, Chunk_capacity, std::allocator<cached_item_type> >;

  using annotation_type = typename Top_item_base::annotation_type;

  using chunk_type = chunk<queue_type, cache_type, annotation_type>;
  using chunk_pointer = chunk_type*;
  using const_chunk_pointer = const chunk_type*;

  measure_type meas_fct;

  using top_item_type = Top_item_base*;

  union item_type { // 1 word
    top_item_type top_item;
    chunk_pointer rec_item;
  };
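
  // At depth 0 a cached item carries a pointer to a client item (top_item);
  // at deeper levels it carries a pointer to a chunk of the enclosing level
  // (rec_item). This one-word union is what lets the middle sequence reuse
  // the same layer type to store chunks of items (structural bootstrapping).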

  // Representation of the layers:

  /*---------------------------------------------------------------------*/

  class layer {
  public:

    measure_type meas_fct; // todo: get rid of this!

    measured_type cached; // only used for deep layers
    chunk_type front_outer, front_inner, back_inner, back_outer;
    chunk_type& shallow_chunk /* = front_outer */; // shallow layers only use front_outer
    layer* middle; // null iff shallow layer
    // if the structure is empty, then it must be shallow
    // w.r.t. the paper:
    // - we allow deep structures to contain a single item
    // - we enforce that if an outer buffer is empty then the corresponding inner buffer is empty
    // - we enforce that if both outer buffers are empty then the middle is empty
    annotation_type annotation;

    using self_type = layer;

    layer() : shallow_chunk(front_outer) {
      // build a shallow layer representing an empty structure
      middle = NULL;
    }

    ~layer() {
      // recursively deallocate layers
      if (middle != NULL)
        delete middle;
    }

    /* note: our combine operator is not necessarily commutative.
     * as such, we need to be careful to get the order of the
     * operands right when we increment on the front and back.
     */
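    // For example, with a size measure (measured_type = size_t, combine = +,
    // identity = 0) the operand order is irrelevant; but for a
    // non-commutative algebra (say, one that concatenates summaries),
    // combine(m, cached) and combine(cached, m) differ, hence the two
    // distinct increment operations below.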

    void incr_front(measured_type& cached, const measured_type& m) {
      cached = algebra_type::combine(m, cached);
    }

    void incr_back(measured_type& cached, const measured_type& m) {
      cached = algebra_type::combine(cached, m);
    }

    void decr_front(measured_type& cached, const measured_type& m) {
      incr_front(cached, algebra_type::inverse(m));
    }

    void decr_back(measured_type& cached, const measured_type& m) {
      incr_back(cached, algebra_type::inverse(m));
    }

    void rec_copy(int depth, const self_type& other) {
      // alternative: replace the next 2 lines with "this->~layer()".
      if (middle != NULL)
        delete middle;
      cached = other.cached;
      chunk_deep_copy(depth, other.front_outer, front_outer);
      chunk_deep_copy(depth, other.front_inner, front_inner);
      chunk_deep_copy(depth, other.back_inner, back_inner);
      chunk_deep_copy(depth, other.back_outer, back_outer);
      if (other.middle == NULL) { // other is shallow
        middle = NULL;
      } else { // other is deep
        middle = new layer();
        middle->rec_copy(depth+1, *(other.middle));
      }
    }

    void swap(self_type& other) {
      std::swap(cached, other.cached);
      std::swap(middle, other.middle);
      front_outer.swap(other.front_outer);
      front_inner.swap(other.front_inner);
      back_inner.swap(other.back_inner);
      back_outer.swap(other.back_outer);
    }

    void convert_deep_to_shallow() {
      delete middle;
      middle = NULL;
    }

    inline bool is_shallow() const {
      return (middle == NULL);
    }

    void reset_cached() {
      if (is_shallow())
        return;
      cached = algebra_type::identity();
      incr_back(cached, front_outer.get_cached());
      incr_back(cached, front_inner.get_cached());
      incr_back(cached, middle->get_cached());
      incr_back(cached, back_inner.get_cached());
      incr_back(cached, back_outer.get_cached());
    }

    // assumes deep layer;
    // take a chunk "c" and push it into the front of the middle sequence,
    // leaving "c" empty
    inline void push_buffer_front_force(chunk_type& c) {
      chunk_pointer d = chunk_alloc();
      c.swap(*d);
      middle->push_front(cached_item_of_chunk_pointer(d));
    }

    // symmetric to push_buffer_front
    inline void push_buffer_back_force(chunk_type& c) {
      chunk_pointer d = chunk_alloc();
      c.swap(*d);
      middle->push_back(cached_item_of_chunk_pointer(d));
    }

    inline bool empty() const {
      if (is_shallow())
        return shallow_chunk.empty();
      else
        return front_outer.empty() && back_outer.empty();
    }

    inline measured_type get_cached() const {
      if (is_shallow())
        return shallow_chunk.get_cached();
      else
        return cached;
    }

    void push_front(cached_item_type x) {
      if (is_shallow()) {
        if (! shallow_chunk.full()) {
          shallow_chunk.push_front(meas_fct, x);
        } else {
          // from shallow to deep
          middle = new layer();
          cached = shallow_chunk.get_cached();
          shallow_chunk.swap(back_outer);
          // todo: share the next 2 lines with the same lines below
          incr_front(cached, x.get_cached());
          front_outer.push_front(meas_fct, x);
        }
      } else { // deep
        if (front_outer.full()) {
          if (front_inner.full())
            push_buffer_front_force(front_inner);
          front_outer.swap(front_inner);
          assert(front_outer.empty());
        }
        incr_front(cached, x.get_cached());
        front_outer.push_front(meas_fct, x);
      }
    }
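
    // Sketch of the cascade on a deep layer: new items land in front_outer;
    // a full front_outer is swapped with an empty front_inner; a full
    // front_inner is boxed into a chunk and pushed onto the middle layer,
    // which is itself a sequence of chunks (hence the bootstrapping).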

    void push_back(cached_item_type x) {
      if (is_shallow()) {
        if (! shallow_chunk.full()) {
          shallow_chunk.push_back(meas_fct, x);
        } else {
          // from shallow to deep
          middle = new layer();
          cached = shallow_chunk.get_cached();
          // noop: shallow_chunk.swap(front_inner);
          incr_back(cached, x.get_cached());
          back_outer.push_back(meas_fct, x);
        }
      } else { // deep
        if (back_outer.full()) {
          if (back_inner.full())
            push_buffer_back_force(back_inner);
          back_outer.swap(back_inner);
          assert(back_outer.empty());
        }
        incr_back(cached, x.get_cached());
        back_outer.push_back(meas_fct, x);
      }
    }

    cached_item_type& front() const {
      if (is_shallow()) {
        return shallow_chunk.front();
      } else { // deep
        assert(! front_outer.empty() || front_inner.empty());
        if (! front_outer.empty()) {
          return front_outer.front();
        } else if (! middle->empty()) {
          chunk_pointer c = chunk_pointer_of_cached_item(middle->front());
          return c->front();
        } else if (! back_inner.empty()) {
          return back_inner.front();
        } else {
          assert(! back_outer.empty());
          return back_outer.front();
        }
      }
    }

    cached_item_type& back() const {
      if (is_shallow()) {
        return shallow_chunk.back();
      } else { // deep
        assert(! back_outer.empty() || back_inner.empty());
        if (! back_outer.empty()) {
          return back_outer.back();
        } else if (! middle->empty()) {
          chunk_pointer c = chunk_pointer_of_cached_item(middle->back());
          return c->back();
        } else if (! front_inner.empty()) {
          return front_inner.back();
        } else {
          assert(! front_outer.empty());
          return front_outer.back();
        }
      }
    }

    // assumes a deep structure;
    // ensures that if the structure is not empty then front_outer is not empty,
    // and if the structure is empty then it becomes shallow.
    // remark: acting on the back chunks is not needed for invariants, but helps
    // for efficiency in typical application cases.
    void try_populate_front_outer() {
      if (front_outer.empty()) {
        if (! front_inner.empty()) {
          front_inner.swap(front_outer);
        } else if (! middle->empty()) {
          chunk_pointer c = chunk_pointer_of_cached_item(middle->pop_front());
          front_outer.swap(*c);
          chunk_free_when_empty(c);
        } else if (! back_inner.empty()) {
          back_inner.swap(front_outer);
        } else if (! back_outer.empty()) {
          back_outer.swap(front_outer);
        } else { // empty structure
          convert_deep_to_shallow();
        }
      }
    }

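    // symmetric to try_populate_front_outer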
    void try_populate_back_outer() {
      if (back_outer.empty()) {
        if (! back_inner.empty()) {
          back_inner.swap(back_outer);
        } else if (! middle->empty()) {
          chunk_pointer c = chunk_pointer_of_cached_item(middle->pop_back());
          back_outer.swap(*c);
          chunk_free_when_empty(c);
        } else if (! front_inner.empty()) {
          front_inner.swap(back_outer);
        } else if (! front_outer.empty()) {
          front_outer.swap(back_outer);
        } else { // empty structure
          convert_deep_to_shallow();
        }
      }
    }

    cached_item_type pop_front() {
      if (is_shallow()) {
        return shallow_chunk.pop_front(meas_fct);
      } else { // deep
        if (front_outer.empty()) {
          assert(front_inner.empty());
          if (! middle->empty()) {
            chunk_pointer c = chunk_pointer_of_cached_item(middle->pop_front());
            front_outer.swap(*c);
            chunk_free_when_empty(c);
          } else if (! back_inner.empty()) {
            back_inner.swap(front_outer);
          } else if (! back_outer.empty()) {
            back_outer.swap(front_outer);
          }
        }
        assert(! front_outer.empty());
        cached_item_type x = front_outer.pop_front(meas_fct);
        if (algebra_type::has_inverse)
          decr_front(cached, x.get_cached());
        else
          reset_cached();
        try_populate_front_outer();
        return x;
      }
    }

    cached_item_type pop_back() {
      if (is_shallow()) {
        return shallow_chunk.pop_back(meas_fct);
      } else { // deep
        if (back_outer.empty()) {
          assert(back_inner.empty());
          if (! middle->empty()) {
            chunk_pointer c = chunk_pointer_of_cached_item(middle->pop_back());
            back_outer.swap(*c);
            chunk_free_when_empty(c);
          } else if (! front_inner.empty()) {
            front_inner.swap(back_outer);
          } else if (! front_outer.empty()) {
            front_outer.swap(back_outer);
          }
        }
        assert(! back_outer.empty());
        cached_item_type x = back_outer.pop_back(meas_fct);
        if (algebra_type::has_inverse)
          decr_back(cached, x.get_cached());
        else
          reset_cached();
        try_populate_back_outer();
        return x;
      }
    }
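
    // Note: when the measure algebra has no inverse (has_inverse == false),
    // the cached measure cannot be decremented incrementally, so the pop
    // operations above fall back to recomputing it via reset_cached().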

    // assumption: the invariant "both outer empty implies middle empty" may be
    // broken; calling this function restores it, or turns the level into a
    // shallow one if everything is empty
    void restore_both_outer_empty_middle_empty() {
      if (is_shallow())
        return;
      if (front_outer.empty() && back_outer.empty()) {
        if (middle->empty()) {
          convert_deep_to_shallow();
        } else {
          // pop to the front (to the back would also work)
          chunk_pointer c = chunk_pointer_of_cached_item(middle->pop_front());
          front_outer.swap(*c);
          chunk_free_when_empty(c);
        }
      }
    }

    void ensure_empty_inner() {
      if (! front_inner.empty())
        push_buffer_front_force(front_inner);
      if (! back_inner.empty())
        push_buffer_back_force(back_inner);
    }

    using position_type = enum {
      found_front_outer,
      found_front_inner,
      found_middle,
      found_back_inner,
      found_back_outer,
      found_nowhere
    };

    template <class Pred>
    measured_type search_in_layer(const Pred& p, measured_type prefix, position_type& pos) const {
      measured_type cur = prefix; // prefix including current chunk
      // common code for shallow and deep
      if (! front_outer.empty()) {
        prefix = cur;
        cur = algebra_type::combine(cur, front_outer.get_cached());
        if (p(cur)) {
          pos = found_front_outer;
          return prefix;
        }
      }
      // special case for shallow
      if (is_shallow()) {
        prefix = cur;
        pos = found_nowhere;
        return prefix;
      }
      if (! front_inner.empty()) {
        prefix = cur;
        cur = algebra_type::combine(cur, front_inner.get_cached());
        if (p(cur)) {
          pos = found_front_inner;
          return prefix;
        }
      }
      if (! middle->empty()) {
        prefix = cur;
        cur = algebra_type::combine(cur, middle->get_cached());
        if (p(cur)) {
          pos = found_middle;
          return prefix;
        }
      }
      if (! back_inner.empty()) {
        prefix = cur;
        cur = algebra_type::combine(cur, back_inner.get_cached());
        if (p(cur)) {
          pos = found_back_inner;
          return prefix;
        }
      }
      if (! back_outer.empty()) {
        prefix = cur;
        cur = algebra_type::combine(cur, back_outer.get_cached());
        if (p(cur)) {
          pos = found_back_outer;
          return prefix;
        }
      }
      prefix = cur;
      pos = found_nowhere;
      return prefix;
    }
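
    // The predicate p is assumed monotone in the prefix measure: once p holds
    // of the measure of some prefix, it holds of every larger prefix. For
    // instance, indexed access with a size measure uses p(m) = (n+1 <= m),
    // as in split(size_type n, ...) near the end of this class.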

    measure_type get_measure() const {
      return meas_fct;
    }

    template <class Node, class Pointer>
    static void cache_search_data_for_backtracking(const Node& nd,
                                                   Pointer ptr,
                                                   const annotation::parent_pointer_tag tag,
                                                   const int depth,
                                                   const measured_type prefix) {
      nd.annotation.parent.set_pointer(ptr, tag);
      assert(!annotation_type::finger_search_enabled ||
             nd.annotation.parent.template get_tag() == tag);
      nd.annotation.parent.set_depth(depth);
      nd.annotation.parent.template set_prefix<measured_type>(prefix);
    }

    template <class Pred>
    static measured_type chunk_search(const measure_type& meas_fct,
                                      const chunk_type& c,
                                      const int depth,
                                      const Pred& p,
                                      const measured_type prefix,
                                      const Top_item_base*& r) {
      chunk_search_type search;
      chunk_search_result_type s = search(c, meas_fct, prefix, p);
      measured_type prefix_res = s.prefix;
      size_type i = s.position - 1;
      cached_item_type& t = c[i];
      const annotation::parent_pointer_tag tag = annotation::PARENT_POINTER_BOOTSTRAP_INTERIOR_NODE;
      if (depth == 0) {
        r = top_item_of_cached_item(t);
        cache_search_data_for_backtracking(*r, &c, tag, depth, prefix);
      } else {
        const_chunk_pointer tc = const_chunk_pointer_of_cached_item(t);
        cache_search_data_for_backtracking(*tc, &c, tag, depth, prefix);
        prefix_res = chunk_search(meas_fct, *tc, depth-1, p, prefix_res, r);
      }
      return prefix_res;
    }

    template <class Pred>
    measured_type rec_search(const measure_type& meas_fct, const int depth,
                             const Pred& p, const measured_type prefix,
                             const Top_item_base*& r) const {
      position_type pos;
      measured_type prefix_res = search_in_layer(p, prefix, pos);
      const annotation::parent_pointer_tag tag = annotation::PARENT_POINTER_BOOTSTRAP_LAYER_NODE;
      switch (pos) {
        case found_front_outer: {
          cache_search_data_for_backtracking(front_outer, this, tag, depth, prefix);
          prefix_res = chunk_search(meas_fct, front_outer, depth, p, prefix_res, r);
          break;
        }
        case found_front_inner: {
          cache_search_data_for_backtracking(front_inner, this, tag, depth, prefix);
          prefix_res = chunk_search(meas_fct, front_inner, depth, p, prefix_res, r);
          break;
        }
        case found_middle: {
          cache_search_data_for_backtracking(*middle, this, tag, depth, prefix);
          prefix_res = middle->rec_search(meas_fct, depth+1, p, prefix_res, r);
          break;
        }
        case found_back_inner: {
          cache_search_data_for_backtracking(back_inner, this, tag, depth, prefix);
          prefix_res = chunk_search(meas_fct, back_inner, depth, p, prefix_res, r);
          break;
        }
        case found_back_outer: {
          cache_search_data_for_backtracking(back_outer, this, tag, depth, prefix);
          prefix_res = chunk_search(meas_fct, back_outer, depth, p, prefix_res, r);
          break;
        }
        case found_nowhere: {
          assert(false);
          break;
        }
      } // end switch
      return prefix_res;
    }

    template <class Pred>
    measured_type backtrack_search(const Pred& p, const measured_type prefix,
                                   const Top_item_base*& r) const {
      using tree_pointer_type = union {
        const self_type* layer;
        const_chunk_pointer interior_node;
        const Top_item_base* chunk;
      };
      auto get_parent_linkage = [] (annotation::parent_pointer_tag tag_cur,
                                    tree_pointer_type ptr_cur,
                                    annotation::parent_pointer_tag& tag_next,
                                    tree_pointer_type& ptr_next,
                                    int& depth_next,
                                    measured_type& prefix_next) {
        switch (tag_cur) {
          case annotation::PARENT_POINTER_BOOTSTRAP_INTERIOR_NODE: {
            const_chunk_pointer nd = ptr_cur.interior_node;
            tag_next = nd->annotation.parent.get_tag();
            ptr_next.interior_node = nd->annotation.parent.template get_pointer<const_chunk_pointer>();
            depth_next = nd->annotation.parent.get_depth();
            prefix_next = nd->annotation.parent.template get_prefix<measured_type>();
            break;
          }
          case annotation::PARENT_POINTER_BOOTSTRAP_LAYER_NODE: {
            const self_type* layer = ptr_cur.layer;
            tag_next = layer->annotation.parent.get_tag();
            ptr_next.layer = layer->annotation.parent.template get_pointer<const self_type*>();
            depth_next = layer->annotation.parent.get_depth();
            prefix_next = layer->annotation.parent.template get_prefix<measured_type>();
            break;
          }
          case annotation::PARENT_POINTER_CHUNK: {
            const Top_item_base* chunk = ptr_cur.chunk;
            tag_next = chunk->annotation.parent.get_tag();
            ptr_next.interior_node = chunk->annotation.parent.template get_pointer<const_chunk_pointer>();
            depth_next = chunk->annotation.parent.get_depth();
            prefix_next = chunk->annotation.parent.template get_prefix<measured_type>();
            break;
          }
          case annotation::PARENT_POINTER_UNINITIALIZED:
          default: {
            assert(false);
            tag_next = annotation::PARENT_POINTER_UNINITIALIZED;
            ptr_next.interior_node = nullptr;
            depth_next = -1;
            prefix_next = algebra_type::identity();
            break;
          }
        } // end switch
      };
      assert(r != nullptr);
      top_measure_type top_meas_fct;
      measured_type prefix_cur = r->annotation.prefix.template get_cached<measured_type>();
      annotation::parent_pointer_tag tag_cur = annotation::PARENT_POINTER_CHUNK;
      tree_pointer_type ptr_cur;
      ptr_cur.chunk = r;
      int depth_cur = r->annotation.parent.get_depth() - 1;
      while (true) {
        auto finished_backtracking = [&] (measured_type m) {
          measured_type n = algebra_type::combine(prefix_cur, m);
          return !p(prefix_cur) && p(n);
        };

        switch (tag_cur) {
          case annotation::PARENT_POINTER_BOOTSTRAP_INTERIOR_NODE: {
            const_chunk_pointer nd = ptr_cur.interior_node;
            if (finished_backtracking(nd->get_cached())) {
              return chunk_search(meas_fct, *nd, depth_cur, p, prefix_cur, r);
            }
            break;
          }
          case annotation::PARENT_POINTER_BOOTSTRAP_LAYER_NODE: {
            const self_type* layer = ptr_cur.layer;
            if (finished_backtracking(layer->get_cached())) {
              return layer->rec_search(meas_fct, depth_cur, p, prefix_cur, r);
            }
            break;
          }
          case annotation::PARENT_POINTER_CHUNK: {
            const Top_item_base* chunk = ptr_cur.chunk;
            if (finished_backtracking(top_meas_fct(chunk))) {
              r = chunk;
              return prefix_cur;
            }
            break;
          }
          case annotation::PARENT_POINTER_UNINITIALIZED: {
            return rec_search(meas_fct, depth0, p, prefix, r);
          }
          default: {
            assert(false);
            r = nullptr;
            return prefix_cur;
          }
        }
        get_parent_linkage(tag_cur, ptr_cur, tag_cur, ptr_cur, depth_cur, prefix_cur);
      }
      assert(false);
      r = nullptr;
      return prefix_cur;
    }
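
    // Backtracking sketch: starting from the previously returned chunk r, we
    // walk parent pointers upward until the target measure falls inside the
    // current subtree (finished_backtracking), then descend again with the
    // regular search routines; this is the finger-search fast path used when
    // annotation_type::finger_search_enabled holds.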

    template <class Pred>
    measured_type search(const Pred& p, measured_type prefix, const Top_item_base*& r) const {
      measure_type meas_fct = get_measure();
      if (annotation_type::finger_search_enabled && r != nullptr)
        return backtrack_search(p, prefix, r);
      else
        return rec_search(meas_fct, depth0, p, prefix, r);
    }

    // precondition: other is empty (and therefore shallow)
    template <class Pred>
    measured_type split(const Pred& p, measured_type prefix, cached_item_type& x, self_type& other) {
      assert(other.empty() && other.is_shallow());
      // TODO: needed? --equivalent to: copy_measure_to(other);
      // other.meas_fct = meas_fct;
      if (is_shallow()) {
        // if (shallow_chunk.empty()) TODO?
        //   return prefix;
        prefix = chunk_split(p, prefix, shallow_chunk, x, other.shallow_chunk);
      } else { // deep
        other.middle = new layer(); // turn other from shallow to deep (might be undone)
        // other.cached will be reset later
        ensure_empty_inner();
        position_type pos;
        prefix = search_in_layer(p, prefix, pos);
        switch (pos) {
          case found_front_outer: {
            prefix = chunk_split(p, prefix, front_outer, x, other.front_outer);
            std::swap(middle, other.middle);
            back_outer.swap(other.back_outer);
            break;
          }
          case found_front_inner: {
            assert(false); // thanks to the call to ensure_empty_inner()
            break;
          }
          case found_middle: {
            back_outer.swap(other.back_outer);
            cached_item_type y;
            prefix = middle->split(p, prefix, y, *(other.middle));
            // TODO: use a function to factorize next 3 lines with other places
            chunk_pointer c = chunk_pointer_of_cached_item(y);
            back_outer.swap(*c);
            chunk_free_when_empty(c);
            prefix = chunk_split(p, prefix, back_outer, x, other.front_outer);
            break;
          }
          case found_back_inner: {
            assert(false); // thanks to the call to ensure_empty_inner()
            break;
          }
          case found_back_outer: {
            prefix = chunk_split(p, prefix, back_outer, x, other.back_outer);
            break;
          }
          case found_nowhere: {
            // don't split (item not found)
            break;
          }
        } // end switch
        // reset cached values
        reset_cached();
        other.reset_cached();
        // restore invariants
        restore_both_outer_empty_middle_empty();
        other.restore_both_outer_empty_middle_empty();
      }
      return prefix;
    }
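
    // After a successful 3-way split: items measured before the target stay
    // in this layer, the item that reaches the target lands in x, and the
    // remaining items move to other (see the chunk_split helper below).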

    // take a chunk "c" and concatenate its content into the back of the middle
    // sequence, leaving "c" empty.
    // assumes deep level
    void push_buffer_back(chunk_type& c) {
      size_t csize = c.size();
      if (csize == 0) {
        // do nothing
      } else if (middle->empty()) {
        push_buffer_back_force(c);
      } else {
        chunk_pointer b = chunk_pointer_of_cached_item(middle->back());
        size_t bsize = b->size();
        if (bsize + csize > Chunk_capacity) {
          push_buffer_back_force(c);
        } else {
          middle->pop_back();
          c.transfer_from_front_to_back(meas_fct, *b, csize);
          middle->push_back(cached_item_of_chunk_pointer(b));
        }
      }
    }

    // symmetric to push_buffer_back
    void push_buffer_front(chunk_type& c) {
      size_t csize = c.size();
      if (csize == 0) {
        // do nothing
      } else if (middle->empty()) {
        push_buffer_front_force(c);
      } else {
        chunk_pointer b = chunk_pointer_of_cached_item(middle->front());
        size_t bsize = b->size();
        if (bsize + csize > Chunk_capacity) {
          push_buffer_front_force(c);
        } else {
          middle->pop_front();
          c.transfer_from_back_to_front(meas_fct, *b, csize);
          middle->push_front(cached_item_of_chunk_pointer(b));
        }
      }
    }

    // concatenate data from other, leaving other empty
    void concat(self_type& other) {
      if (other.is_shallow()) {
        size_type nb = other.shallow_chunk.size();
        for (size_type i = 0; i < nb; i++) {
          cached_item_type x = other.shallow_chunk.pop_front(meas_fct);
          push_back(x);
        }
      } else if (is_shallow()) {
        swap(other);
        size_type nb = other.shallow_chunk.size();
        for (size_type i = 0; i < nb; i++) {
          cached_item_type x = other.shallow_chunk.pop_back(meas_fct);
          push_front(x);
        }
      } else { // both deep
        // push buffers into the middle sequences
        push_buffer_back(back_inner);
        push_buffer_back(back_outer);
        other.push_buffer_front(other.front_inner);
        other.push_buffer_front(other.front_outer);
        // fuse front and back, if needed
        if (! middle->empty() && ! other.middle->empty()) {
          chunk_pointer c1 = chunk_pointer_of_cached_item(middle->back());
          chunk_pointer c2 = chunk_pointer_of_cached_item(other.middle->front());
          size_type nb1 = c1->size();
          size_type nb2 = c2->size();
          if (nb1 + nb2 <= Chunk_capacity) {
            middle->pop_back();
            other.middle->pop_front();
            c2->transfer_from_front_to_back(meas_fct, *c1, nb2);
            chunk_free_when_empty(c2);
            middle->push_back(cached_item_of_chunk_pointer(c1));
          }
        }
        // migrate back chunks of the other
        back_inner.swap(other.back_inner);
        back_outer.swap(other.back_outer);
        // concatenate the middle sequences
        middle->concat(*(other.middle));
        // update the cache --alternative: reset_cached();
        cached = algebra_type::combine(cached, other.cached);
        // restore invariants
        restore_both_outer_empty_middle_empty();
        // turn other (which is now empty) into shallow
        other.convert_deep_to_shallow();
        assert(other.empty());
      }
    }

    void rec_check(int depth) const {
      #ifdef BOOTCHUNKEDSEQ_CHECK
      if (! is_shallow()) { // deep
        measured_type wfo = chunk_check_weight(depth, front_outer);
        measured_type wfi = chunk_check_weight(depth, front_inner);
        measured_type wbi = chunk_check_weight(depth, back_inner);
        measured_type wbo = chunk_check_weight(depth, back_outer);
        measured_type wmid = middle->get_cached();
        measured_type w = algebra_type::combine(wfo, wfi);
        w = algebra_type::combine(w, wmid);
        w = algebra_type::combine(w, wbi);
        w = algebra_type::combine(w, wbo);
        size_t sfo = front_outer.size();
        size_t sfi = front_inner.size();
        size_t sbi = back_inner.size();
        size_t sbo = back_outer.size();
        assert(sfo != 0 || sfi == 0); // outer empty implies inner empty
        assert(sbo != 0 || sbi == 0); // outer empty implies inner empty
        assert(sfi == 0 || sfi == Chunk_capacity); // empty or full
        assert(sbi == 0 || sbi == Chunk_capacity); // empty or full
        assert(sfo + sbo > 0); // not both outer empty (else shallow)
        // todo: invariant on chunks in middle sequence
        middle->rec_check(depth+1);
      }
      #endif
    }

    void rec_print(int depth) {
      if (is_shallow()) {
        printf("S:");
        rec_print_chunk(depth, shallow_chunk);
        printf("\n");
      } else { // deep
        printf("D:{");
        // CachePrinter::print(cached);
        std::cout << cached;
        printf("} ");
        rec_print_chunk(depth, front_outer);
        printf(" ");
        rec_print_chunk(depth, front_inner);
        printf(" || ");
        rec_print_chunk(depth, back_inner);
        printf(" ");
        rec_print_chunk(depth, back_outer);
        printf("\n");
        middle->rec_print(depth+1);
      }
    }

    template <class Add_edge, class Process_chunk>
    void rec_reveal_internal_structure(const Add_edge& add_edge,
                                       const Process_chunk& process_chunk,
                                       int depth) const {
      if (is_shallow()) {
        add_edge((void*)this, (void*)&shallow_chunk);
        rec_reveal_internal_structure_of_chunk(add_edge, process_chunk, depth, shallow_chunk);
      } else { // deep
        add_edge((void*)this, (void*)&front_outer);
        add_edge((void*)this, (void*)&front_inner);
        add_edge((void*)this, (void*)middle);
        add_edge((void*)this, (void*)&back_inner);
        add_edge((void*)this, (void*)&back_outer);
        rec_reveal_internal_structure_of_chunk(add_edge, process_chunk, depth, front_outer);
        rec_reveal_internal_structure_of_chunk(add_edge, process_chunk, depth, front_inner);
        middle->rec_reveal_internal_structure(add_edge, process_chunk, depth+1);
        rec_reveal_internal_structure_of_chunk(add_edge, process_chunk, depth, back_inner);
        rec_reveal_internal_structure_of_chunk(add_edge, process_chunk, depth, back_outer);
      }
    }

    template <class Body>
    void rec_for_each(int depth, const Body& f) const {
      if (is_shallow()) {
        chunk_for_each(depth, f, shallow_chunk);
      } else {
        chunk_for_each(depth, f, front_outer);
        chunk_for_each(depth, f, front_inner);
        middle->rec_for_each(depth+1, f);
        chunk_for_each(depth, f, back_inner);
        chunk_for_each(depth, f, back_outer);
      }
    }
  };

  /*---------------------------------------------------------------------*/

  layer top_layer;

  /*---------------------------------------------------------------------*/

  static inline cached_item_type cached_item_of_chunk_pointer(chunk_pointer c) {
    item_type x;
    x.rec_item = c;
    measured_type w = c->get_cached();
    cached_item_type v(x, w);
    return v;
  }

  static inline cached_item_type cached_item_of_top_item(top_item_type y, measured_type w) {
    item_type x;
    x.top_item = y;
    cached_item_type v(x, w);
    return v;
  }

  static inline chunk_pointer chunk_pointer_of_cached_item(const cached_item_type v) {
    return v.get_item().rec_item;
  }

  static inline const_chunk_pointer const_chunk_pointer_of_cached_item(const cached_item_type v) {
    return v.get_item().rec_item;
  }

  static inline top_item_type top_item_of_cached_item(const cached_item_type v) {
    return v.get_item().top_item;
  }

  /*---------------------------------------------------------------------*/

  static inline chunk_pointer chunk_alloc() {
    return new chunk_type();
  }

  static inline void chunk_free_when_empty(chunk_pointer c) {
    delete c;
  }

  // recursively delete all the objects stored in the chunk,
  // leaving the current chunk in an unstable state;
  // only use this function to implement the destructor
  static inline void chunk_deep_free(int depth, chunk_type& c) {
    c.for_each([depth] (cached_item_type& v) {
      if (depth == 0) {
        top_item_type x = top_item_of_cached_item(v);
        Top_item_deleter::dealloc(x);
      } else {
        chunk_pointer d = chunk_pointer_of_cached_item(v);
        chunk_deep_free(depth-1, *d);
        delete d;
      }
    });
  }

  static inline void chunk_deep_free_and_destruct(int depth, chunk_type& c) {
    chunk_deep_free(depth, c);
    c.~chunk_type();
  }

  // recursively copy a chunk into another one,
  // using the copy constructor of class Top_item_base for copying top items
  static void chunk_deep_copy(int depth, const chunk_type& src, chunk_type& dst) {
    src.for_each([&] (const cached_item_type& v) {
      cached_item_type c;
      if (depth == 0) {
        top_item_type orig = top_item_of_cached_item(v);
        top_item_type copy = Top_item_copier::copy(orig);
        c = cached_item_of_top_item(copy, v.get_cached());
      } else {
        const_chunk_pointer orig = const_chunk_pointer_of_cached_item(v);
        chunk_pointer copy = new chunk_type();
        chunk_deep_copy(depth-1, *orig, *copy);
        c = cached_item_of_chunk_pointer(copy);
      }
      measure_type meas_fct; // TODO
      dst.push_back(meas_fct, c);
    });
  }

  // apply a given function f to the top items of the tree rooted at node c
  template <class Body>
  static void chunk_for_each(int depth, const Body& f, const chunk_type& c) {
    c.for_each([&] (const cached_item_type& v) {
      if (depth == 0) {
        top_item_type item = top_item_of_cached_item(v);
        f(item);
      } else {
        const_chunk_pointer d = const_chunk_pointer_of_cached_item(v);
        chunk_for_each(depth-1, f, *d);
      }
    });
  }

  static inline measured_type chunk_check_weight(int depth, const chunk_type& c) {
    measured_type wref = c.get_cached();
    measured_type w = algebra_type::identity();
    c.for_each([&w, depth] (cached_item_type& v) {
      if (depth == 0) {
        w = algebra_type::combine(w, v.get_cached());
      } else {
        measured_type wiref = v.get_cached();
        chunk_pointer d = chunk_pointer_of_cached_item(v);
        measured_type wi = chunk_check_weight(depth-1, *d);
        // assert(Size_access::csize(wi) == Size_access::csize(wiref));
        // assert(wi == wiref);
        // TODO?
        w = algebra_type::combine(w, wiref);
      }
    });
    // assert(Size_access::csize(w) == Size_access::csize(wref));
    // assert(w == wref);
    // TODO?
    return wref;
  }

  static void rec_print_item(int depth, cached_item_type& x) {
    if (depth == 0) {
      top_item_type y = top_item_of_cached_item(x);
      Top_item_base v = *y;
      std::cout << v.value;
    } else {
      chunk_pointer d = chunk_pointer_of_cached_item(x);
      rec_print_chunk(depth-1, *d);
    }
  }

  template <class Add_edge, class Process_chunk>
  static void rec_reveal_internal_structure_of_chunk(const Add_edge& add_edge,
                                                     const Process_chunk& process_chunk,
                                                     int depth,
                                                     const chunk_type& c) {
    c.for_each([&] (const cached_item_type& v) {
      if (depth == 0) {
        top_item_type item = top_item_of_cached_item(v);
        add_edge((void*)&c, (void*)item);
        process_chunk(item);
      } else {
        const_chunk_pointer d = const_chunk_pointer_of_cached_item(v);
        add_edge((void*)&c, (void*)d);
        rec_reveal_internal_structure_of_chunk(add_edge, process_chunk, depth-1, *d);
      }
    });
  }

  static void rec_print_chunk(int depth, const chunk_type& c) {
    printf("<");
    std::cout << c.get_cached();
    printf(">(");
    c.for_each([&] (cached_item_type& x) {
      rec_print_item(depth, x);
      printf(" ");
    });
    printf(")");
  }

  /*---------------------------------------------------------------------*/

  /* 3-way split: place in `x` the item that reaches the targeted measure.
   *
   * We define the targeted measure as the value `m` for which `p(m)` returns
   * `true`, where `m` equals the combined measures of the items in the
   * sequence up to and including `x`
   * (i.e., `algebra_type::combine(prefix, meas_fct(x))`).
   *
   * Assumes `dst` to be initially uninitialized.
   */
  template <class Pred>
  static measured_type chunk_split(const Pred& p,
                                   measured_type prefix,
                                   chunk_type& src,
                                   cached_item_type& x,
                                   chunk_type& dst) {
    measure_type meas_fct; // TODO
    return src.split(meas_fct, p, prefix, x, dst);
  }

  /*---------------------------------------------------------------------*/

  using chunk_search_type = itemsearch::search_in_chunk<chunk_type, algebra_type>;
  using chunk_search_result_type = typename chunk_search_type::result_type;

  /* Find where to split a chunk to reach a measure targeted by a given
   * predicate `p`. Returns the position expressed as a one-based index
   * relative to the chunk, together with the measure, relative to the
   * enclosing sequence, to the left of that position (i.e., the `prefix`
   * value).
   */
  template <class Pred>
  static chunk_search_result_type chunk_search(const Pred& p,
                                               measured_type prefix,
                                               chunk_type& c) {
    chunk_search_type search;
    measure_type meas_fct; // TODO
    return search(c, meas_fct, prefix, p);
  }

  /*---------------------------------------------------------------------*/

  static constexpr int depth0 = 0;

public:

  using value_type = top_item_type;

  cdeque() {}

  ~cdeque() {}

  cdeque(const self_type& other) {
    top_layer.rec_copy(depth0, other.top_layer);
  }

  void swap(self_type& other) {
    top_layer.swap(other.top_layer);
  }

  inline bool empty() const {
    return top_layer.empty();
  }

  inline measured_type get_cached() const {
    return top_layer.get_cached();
  }

  inline void push_front(const top_measure_type& top_meas, const value_type& x) {
    cached_item_type v = cached_item_of_top_item(x, top_meas(x));
    top_layer.push_front(v);
    check();
  }

  inline void push_back(const top_measure_type& top_meas, const value_type& x) {
    cached_item_type v = cached_item_of_top_item(x, top_meas(x));
    top_layer.push_back(v);
    check();
  }

  inline value_type front() const {
    cached_item_type v = top_layer.front();
    return top_item_of_cached_item(v);
  }

  inline value_type back() const {
    cached_item_type v = top_layer.back();
    return top_item_of_cached_item(v);
  }

  inline value_type cback() const {
    return back();
  }

  // TODO: why this unnamed argument?
  value_type pop_front(const top_measure_type&) {
    cached_item_type v = top_layer.pop_front();
    check();
    return top_item_of_cached_item(v);
  }

  value_type pop_back(const top_measure_type&) {
    cached_item_type v = top_layer.pop_back();
    check();
    return top_item_of_cached_item(v);
  }

  // concatenate the items of "other" to the right of the current sequence,
  // in place; leaves the "other" structure empty.
  void concat(const top_measure_type&, self_type& other) {
    top_layer.concat(other.top_layer);
    check();
    other.check();
  }

  template <class Pred>
  measured_type search_for_chunk(const Pred& p, measured_type prefix,
                                 const Top_item_base*& c) const {
    prefix = top_layer.search(p, prefix, c);
    return prefix;
  }

  template <class Pred>
  measured_type split(const top_measure_type&,
                      const Pred& p,
                      measured_type prefix,
                      value_type& x,
                      self_type& other) {
    cached_item_type v;
    prefix = top_layer.split(p, prefix, v, other.top_layer);
    x = top_item_of_cached_item(v);
    check();
    other.check();
    return prefix;
  }

  // 2-way splitting; assumes `other` to be empty.
  template <class Pred>
  measured_type split(const top_measure_type& meas,
                      const Pred& p,
                      measured_type prefix,
                      self_type& other) {
    value_type v;
    prefix = split(meas, p, prefix, v, other);
    other.push_front(meas, v);
    check();
    other.check();
    return prefix;
  }

  template <class Body>
  void for_each(const Body& f) const {
    top_layer.rec_for_each(0, f);
  }

  measure_type get_measure() const {
    return meas_fct;
  }

  inline void push_front(const value_type& x, measured_type w) {
    cached_item_type v = cached_item_of_top_item(x, w);
    top_layer.push_front(v);
  }

  inline void push_back(const value_type& x, measured_type w) {
    cached_item_type v = cached_item_of_top_item(x, w);
    top_layer.push_back(v);
  }

  // ---for debugging

  value_type pop_front() {
    top_measure_type meas;
    return pop_front(meas);
  }

  value_type pop_back() {
    top_measure_type meas;
    return pop_back(meas);
  }

  void concat(self_type& other) {
    top_measure_type meas;
    concat(meas, other);
  }

  /* deprecated
  // 3-way splitting; assumes "other" to be initially empty.
  void split(measured_type wtarget, value_type& x, self_type& other) {
    measured_type prefix = algebra_type::identity();
    size_type orig_sz = get_cached();
    size_type nb_target = wtarget + 1;
    auto p = [nb_target] (measured_type m) {
      return nb_target <= m;
    };
    top_measure_type top_meas;
    prefix = split(top_meas, p, prefix, x, other);
    size_type sz = get_cached();
    assert(sz == wtarget);
    size_type other_sz = other.get_cached();
    assert(other_sz + sz == orig_sz);
  }
  */

  // only works when size() = get_cached()
  // 2-way splitting; assumes `other` to be empty.
  void split(size_type n, self_type& other) {
    check();
    other.check();
    size_type size_orig = get_cached();
    assert(n <= size_orig);
    if (n == 0) {
      swap(other);
      return; // todo: perform checks
    }
    if (n == size_orig) {
      return; // todo: perform checks
    }
    auto p = [n] (measured_type m) {
      return n+1 <= m;
    };
    top_measured_type prefix = algebra_type::identity();
    top_measure_type top_meas;
    prefix = split(top_meas, p, prefix, other);

    check();
    other.check();
    size_type size_cur = get_cached();
    size_type size_other = other.get_cached();
    //printf("%lu %lu\n", size_cur, size_other);
    assert(size_other + size_cur == size_orig);
    assert(size_cur == n);
    assert(size_other + n == size_orig);
  }

  void check() {
    #ifdef BOOTCHUNKEDSEQ_CHECK
    top_layer.rec_check(depth0);
    //printf("check invariants successful\n");
    #endif
  }

  // TODO: currently ItemPrinter is not used; remove its use?
  template <class ItemPrinter>
  void print() {
    top_layer.rec_print(depth0);
  }

  template <class Add_edge, class Process_chunk>
  void reveal_internal_structure(const Add_edge& add_edge,
                                 const Process_chunk& process_chunk) const {
    add_edge((void*)this, (void*)&top_layer);
    top_layer.rec_reveal_internal_structure(add_edge, process_chunk, depth0);
  }

};

/***********************************************************************/

} // end namespace bootchunkedseq
} // end namespace chunkedseq
} // end namespace data
} // end namespace pasl

#endif
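
/* Usage sketch (illustrative only; `node` is a hypothetical client class that
 * exposes the `annotation_type` and the measure/cache types the container
 * expects via Cached_measure):
 *
 *   using seq_type = pasl::data::chunkedseq::bootchunkedseq::cdeque<node>;
 *
 *   seq_type s;
 *   seq_type::top_measure_type meas;
 *   s.push_back(meas, new node());   // measure of the node is cached on entry
 *   node* n = s.pop_front(meas);     // caches along the spine are updated
 *
 * The container stores node pointers; each chunk and each layer keeps the
 * combined (cached) measure of the items below it.
 */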