btree.h
1 /*--------------------------------------------------------------------------------------+
2 |
3 | Supplied under applicable software license agreement.
4 |
5 | Copyright (c) 2018 Bentley Systems, Incorporated. All rights reserved.
6 |
7 +---------------------------------------------------------------------------------------*/
8 #pragma once
9 
11 // **************************************************************************
12 // *
13 // * NOTICE: This File contains modifications made by Bentley Systems where designated.
14 // *
15 // *************************************************************************/
16 
17 // Copyright 2013 Google Inc. All Rights Reserved.
18 //
19 // Licensed under the Apache License, Version 2.0 (the "License");
20 // you may not use this file except in compliance with the License.
21 // You may obtain a copy of the License at
22 //
23 // http://www.apache.org/licenses/LICENSE-2.0
24 //
25 // Unless required by applicable law or agreed to in writing, software
26 // distributed under the License is distributed on an "AS IS" BASIS,
27 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
28 // See the License for the specific language governing permissions and
29 // limitations under the License.
30 //
31 // A btree implementation of the STL set and map interfaces. A btree is both
32 // smaller and faster than STL set/map. The red-black tree implementation of
33 // STL set/map has an overhead of 3 pointers (left, right and parent) plus the
34 // node color information for each stored value. So a set<int32> consumes 20
35 // bytes for each value stored. This btree implementation stores multiple
36 // values on fixed size nodes (usually 256 bytes) and doesn't store child
37 // pointers for leaf nodes. The result is that a btree_set<int32> may use much
38 // less memory per stored value. For the random insertion benchmark in
39 // btree_test.cc, a btree_set<int32> with node-size of 256 uses 4.9 bytes per
40 // stored value.
41 //
42 // The packing of multiple values onto each node of a btree has another effect
43 // besides better space utilization: better cache locality due to fewer cache
44 // lines being accessed. Better cache locality translates into faster
45 // operations.
46 //
47 // CAVEATS
48 //
49 // Insertions and deletions on a btree can cause splitting, merging or
50 // rebalancing of btree nodes. And even without these operations, insertions
51 // and deletions on a btree will move values around within a node. In both
52 // cases, the result is that insertions and deletions can invalidate iterators
53 // pointing to values other than the one being inserted/deleted. This is
54 // notably different from STL set/map which takes care to not invalidate
55 // iterators on insert/erase except, of course, for iterators pointing to the
56 // value being erased. A partial workaround when erasing is available:
57 // erase() returns an iterator pointing to the item just after the one that was
58 // erased (or end() if none exists). See also safe_btree.
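// For example (illustrative sketch; btree_set is declared in a companion
// header and should_remove is a hypothetical predicate), erasing while
// iterating must refresh the iterator from erase()'s return value:
//
//   btree_set<int> s;
//   // ... populate s ...
//   for (btree_set<int>::iterator it = s.begin(); it != s.end(); ) {
//     if (should_remove(*it))
//       it = s.erase(it);   // erase() returns the next valid position
//     else
//       ++it;
//   }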
59 
60 #ifndef BENTLEY_UTIL_BTREE_BTREE_H__
61 #define BENTLEY_UTIL_BTREE_BTREE_H__
62 
63 #include <Bentley/WString.h>
65 #include <assert.h>
66 #include <stddef.h>
67 #include <sys/types.h>
68 #include <algorithm>
69 #include <functional>
70 #include <iterator>
71 #include <limits>
72 #include <type_traits>
73 #include <new>
74 #include <utility>
75 
76 #pragma push_macro ("min")
77 #pragma push_macro ("max")
78 #ifdef max
79 #undef max
80 #undef min
81 #endif
82 
84 
85 using Bstdcxx::bpair;
87 
88 #if defined(_MSC_VER)
89 typedef intptr_t ssize_t;
90 #endif
91 
92 // Inside a btree method, if we just call swap(), it will choose the
93 // btree::swap method, which we don't want. And we can't say ::swap
94 // because then MSVC won't pick up any std::swap() implementations. We
95 // can't just use std::swap() directly because then we don't get the
96 // specialization for types outside the std namespace. So the solution
97 // is to have a special swap helper function whose name doesn't
98 // collide with other swap functions defined by the btree classes.
99 template <typename T>
100 inline void btree_swap_helper(T &a, T &b) {
101  using std::swap;
102  swap(a, b);
103 }
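// For example (illustrative; user::Widget and its swap() are hypothetical),
// the two-step "using std::swap; swap(a, b);" lets argument-dependent lookup
// pick a type's own swap while still falling back to std::swap:
//
//   namespace user {
//     struct Widget { int *buf; };
//     inline void swap(Widget &a, Widget &b) { std::swap(a.buf, b.buf); }
//   }
//
//   user::Widget x, y;
//   btree_swap_helper(x, y);   // calls user::swap, not std::swap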
104 
105 // A template helper used to select A or B based on a condition.
106 template<bool cond, typename A, typename B>
107 struct if_{
108  typedef A type;
109 };
110 
111 template<typename A, typename B>
112 struct if_<false, A, B> {
113  typedef B type;
114 };
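// For example (illustrative): if_ is a pre-C++11 stand-in for std::conditional:
//
//   static_assert(std::is_same<if_<true,  int, char>::type, int >::value, "A");
//   static_assert(std::is_same<if_<false, int, char>::type, char>::value, "B");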
115 
116 // Types small_ and big_ are guaranteed to satisfy sizeof(small_) < sizeof(big_).
117 typedef char small_;
118 
119 struct big_ {
120  char dummy[2];
121 };
122 
123 // A compile-time assertion.
124 template <bool>
125 struct CompileAssert {
126 };
127 
128 #define COMPILE_ASSERT(expr, msg) \
129  typedef CompileAssert<(bool(expr))> msg[bool(expr) ? 1 : -1]
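// For example (illustrative): when the condition is false the typedef declares
// an array of size -1, which is ill-formed and stops compilation:
//
//   COMPILE_ASSERT(sizeof(big_) > sizeof(small_), big_must_be_bigger);   // ok
//   COMPILE_ASSERT(sizeof(big_) < sizeof(small_), never_true);           // error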
130 
131 // A helper type used to indicate that a key-compare-to functor has been
132 // provided. A user can specify a key-compare-to functor by doing:
133 //
134 // struct MyStringComparer
135 // : public util::btree::btree_key_compare_to_tag {
136 // int operator()(const string &a, const string &b) const {
137 // return a.compare(b);
138 // }
139 // };
140 //
141 // Note that the return type is an int and not a bool. There is a
142 // COMPILE_ASSERT which enforces this return type.
143 struct btree_key_compare_to_tag {
144 };
145 
146 // A helper class that indicates if the Compare parameter is derived from
147 // btree_key_compare_to_tag.
148 template <typename Compare>
149 struct btree_is_key_compare_to
150  : public std::is_convertible<Compare, btree_key_compare_to_tag> {
151 };
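// For example (illustrative): btree_is_key_compare_to<std::less<int> >::value
// is false, while a functor derived from btree_key_compare_to_tag (such as the
// MyStringComparer sketched above) yields true.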
152 
153 // A helper class to convert a boolean comparison into a three-way
154 // "compare-to" comparison that returns a negative value to indicate
155 // less-than, zero to indicate equality and a positive value to
156 // indicate greater-than. This helper class is specialized for
157 // less<string> and greater<string>. The btree_key_compare_to_adapter
158 // class is provided so that btree users automatically get the more
159 // efficient compare-to code when using common Bentley string types // BENTLEY CHANGES
160 // with common comparison functors.
161 template <typename Compare>
162 struct btree_key_compare_to_adapter : Compare {
163  btree_key_compare_to_adapter() { }
164  btree_key_compare_to_adapter(const Compare &c) : Compare(c) { }
165  btree_key_compare_to_adapter(const btree_key_compare_to_adapter<Compare> &c)
166  : Compare(c) {
167  }
168 };
169 
170 template <>
171 struct btree_key_compare_to_adapter<std::less<bastring> >
172  : public btree_key_compare_to_tag {
173  btree_key_compare_to_adapter() {}
174  btree_key_compare_to_adapter(const std::less<bastring>&) {}
175  btree_key_compare_to_adapter(
176  const btree_key_compare_to_adapter<std::less<bastring> >&) {}
177  int operator()(const bastring &a, const bastring &b) const {
178  return a.compare(b);
179  }
180 };
181 
182 template <>
183 struct btree_key_compare_to_adapter<std::greater<bastring> >
184  : public btree_key_compare_to_tag {
185  btree_key_compare_to_adapter() {}
186  btree_key_compare_to_adapter(const std::greater<bastring>&) {}
187  btree_key_compare_to_adapter(
188  const btree_key_compare_to_adapter<std::greater<bastring> >&) {}
189  int operator()(const bastring &a, const bastring &b) const {
190  return b.compare(a);
191  }
192 };
193 template <>
194 struct btree_key_compare_to_adapter<std::less<bwstring> >
195  : public btree_key_compare_to_tag {
196  btree_key_compare_to_adapter() {}
197  btree_key_compare_to_adapter(const std::less<bwstring>&) {}
198  btree_key_compare_to_adapter(
199  const btree_key_compare_to_adapter<std::less<bwstring> >&) {}
200  int operator()(const bwstring &a, const bwstring &b) const {
201  return a.compare(b);
202  }
203 };
204 
205 template <>
206 struct btree_key_compare_to_adapter<std::greater<bwstring> >
207  : public btree_key_compare_to_tag {
208  btree_key_compare_to_adapter() {}
209  btree_key_compare_to_adapter(const std::greater<bwstring>&) {}
210  btree_key_compare_to_adapter(
211  const btree_key_compare_to_adapter<std::greater<bwstring> >&) {}
212  int operator()(const bwstring &a, const bwstring &b) const {
213  return b.compare(a);
214  }
215 };
216 
217 // A helper class that allows a compare-to functor to behave like a plain
218 // compare functor. This specialization is used when we do not have a
219 // compare-to functor.
220 template <typename Key, typename Compare, bool HaveCompareTo>
221 struct btree_key_comparer {
222  btree_key_comparer() {}
223  btree_key_comparer(Compare c) : comp(c) {}
224  static bool bool_compare(const Compare &comp, const Key &x, const Key &y) {
225  return comp(x, y);
226  }
227  bool operator()(const Key &x, const Key &y) const {
228  return bool_compare(comp, x, y);
229  }
230  Compare comp;
231 };
232 
233 // A specialization of btree_key_comparer when a compare-to functor is
234 // present. We need a plain (boolean) comparison in some parts of the btree
235 // code, such as insert-with-hint.
236 template <typename Key, typename Compare>
237 struct btree_key_comparer<Key, Compare, true> {
238  btree_key_comparer() {}
239  btree_key_comparer(Compare c) : comp(c) {}
240  static bool bool_compare(const Compare &comp, const Key &x, const Key &y) {
241  return comp(x, y) < 0;
242  }
243  bool operator()(const Key &x, const Key &y) const {
244  return bool_compare(comp, x, y);
245  }
246  Compare comp;
247 };
248 
249 // A helper function to compare two keys using the specified compare
250 // functor. This dispatches to the appropriate btree_key_comparer comparison,
251 // depending on whether we have a compare-to functor or not (which depends on
252 // whether Compare is derived from btree_key_compare_to_tag).
253 template <typename Key, typename Compare>
254 static bool btree_compare_keys(
255  const Compare &comp, const Key &x, const Key &y) {
256  typedef btree_key_comparer<Key, Compare,
257  btree_is_key_compare_to<Compare>::value> key_comparer;
258  return key_comparer::bool_compare(comp, x, y);
259 }
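// For example (illustrative): btree_compare_keys(std::less<int>(), 1, 2)
// dispatches to the plain comparer and returns true, while with a
// key-compare-to functor such as MyStringComparer above it returns
// comp(x, y) < 0.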
260 
261 template <typename Key, typename Compare,
262  typename Alloc, int TargetNodeSize, int ValueSize>
263 struct btree_common_params {
264  // If Compare is derived from btree_key_compare_to_tag then use it as the
265  // key_compare type. Otherwise, use btree_key_compare_to_adapter<> which will
266  // fall-back to Compare if we don't have an appropriate specialization.
267  typedef typename if_<
268  btree_is_key_compare_to<Compare>::value,
269  Compare, btree_key_compare_to_adapter<Compare> >::type key_compare;
270  // A type which indicates if we have a key-compare-to functor or a plain old
271  // key-compare functor.
272  typedef btree_is_key_compare_to<key_compare> is_key_compare_to;
273 
274  typedef Alloc allocator_type;
275  typedef Key key_type;
276  typedef ssize_t size_type;
277  typedef ptrdiff_t difference_type;
278 
279  enum {
280  kTargetNodeSize = TargetNodeSize,
281 
282  // Available space for values. This is largest for leaf nodes,
283 // which have an overhead of no fewer than two pointers.
284  kNodeValueSpace = TargetNodeSize - 2 * sizeof(void*),
285  };
286 
287  // This is an integral type large enough to hold as many
288 // ValueSize-values as will fit in a node of TargetNodeSize bytes.
289  typedef typename if_<
290  (kNodeValueSpace / ValueSize) >= 256,
291  uint16_t,
292  uint8_t>::type node_count_type;
293 };
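// Worked example (illustrative; assumes 64-bit pointers): with
// TargetNodeSize = 256 and ValueSize = sizeof(int32_t) = 4,
// kNodeValueSpace = 256 - 2 * 8 = 240 and 240 / 4 = 60 < 256, so
// node_count_type is uint8_t. A 4096-byte node instead gives
// 4080 / 4 = 1020 >= 256, selecting uint16_t.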
294 
295 // A parameters structure for holding the type parameters for a bmap.
296 template <typename Key, typename Data, typename Compare,
297  typename Alloc, int TargetNodeSize>
298 struct bmap_params
299  : public btree_common_params<Key, Compare, Alloc, TargetNodeSize,
300  sizeof(Key) + sizeof(Data)> {
301  typedef Data data_type;
302  typedef Data mapped_type;
303  typedef bpair<const Key, data_type> value_type;
304  typedef bpair<Key, data_type> mutable_value_type;
305  typedef value_type* pointer;
306  typedef const value_type* const_pointer;
307  typedef value_type& reference;
308  typedef const value_type& const_reference;
309 
310  enum {
311  kValueSize = sizeof(Key) + sizeof(data_type),
312  };
313 
314  static const Key& key(const value_type &x) { return x.first; }
315  static const Key& key(const mutable_value_type &x) { return x.first; }
316  static void swap(mutable_value_type *a, mutable_value_type *b) {
317  btree_swap_helper(a->first, b->first);
318  btree_swap_helper(a->second, b->second);
319  }
320 };
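// For example (illustrative): bmap_params<int, double, ...> yields
// value_type = bpair<const int, double>, which iterators expose, and
// mutable_value_type = bpair<int, double>, which the nodes store internally so
// that values can be swapped and shifted during splits and rebalancing.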
321 
322 // A parameters structure for holding the type parameters for a btree_set.
323 template <typename Key, typename Compare, typename Alloc, int TargetNodeSize>
324 struct btree_set_params
325  : public btree_common_params<Key, Compare, Alloc, TargetNodeSize,
326  sizeof(Key)> {
327  typedef std::false_type data_type;
328  typedef std::false_type mapped_type;
329  typedef Key value_type;
330  typedef value_type mutable_value_type;
331  typedef value_type* pointer;
332  typedef const value_type* const_pointer;
333  typedef value_type& reference;
334  typedef const value_type& const_reference;
335 
336  enum {
337  kValueSize = sizeof(Key),
338  };
339 
340  static const Key& key(const value_type &x) { return x; }
341  static void swap(mutable_value_type *a, mutable_value_type *b) {
342  btree_swap_helper<mutable_value_type>(*a, *b);
343  }
344 };
345 
346 // An adapter class that converts a lower-bound compare into an upper-bound
347 // compare.
348 template <typename Key, typename Compare>
349 struct btree_upper_bound_adapter : public Compare {
350  btree_upper_bound_adapter(Compare c) : Compare(c) {}
351  bool operator()(const Key &a, const Key &b) const {
352  return !static_cast<const Compare&>(*this)(b, a);
353  }
354 };
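// For example (illustrative): with Compare = std::less<int> the adapted
// functor evaluates !(b < a), i.e. a <= b, so running a lower-bound style
// search with it stops at the first element strictly greater than the key,
// i.e. the upper-bound position.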
355 
356 template <typename Key, typename CompareTo>
357 struct btree_upper_bound_compare_to_adapter : public CompareTo {
358  btree_upper_bound_compare_to_adapter(CompareTo c) : CompareTo(c) {}
359  int operator()(const Key &a, const Key &b) const {
360  return static_cast<const CompareTo&>(*this)(b, a);
361  }
362 };
363 
364 // Dispatch helper class for using linear search with plain compare.
365 template <typename K, typename N, typename Compare>
366 struct btree_linear_search_plain_compare {
367  static int lower_bound(const K &k, const N &n, Compare comp) {
368  return n.linear_search_plain_compare(k, 0, n.count(), comp);
369  }
370  static int upper_bound(const K &k, const N &n, Compare comp) {
371  typedef btree_upper_bound_adapter<K, Compare> upper_compare;
372  return n.linear_search_plain_compare(k, 0, n.count(), upper_compare(comp));
373  }
374 };
375 
376 // Dispatch helper class for using linear search with compare-to.
377 template <typename K, typename N, typename CompareTo>
378 struct btree_linear_search_compare_to {
379  static int lower_bound(const K &k, const N &n, CompareTo comp) {
380  return n.linear_search_compare_to(k, 0, n.count(), comp);
381  }
382  static int upper_bound(const K &k, const N &n, CompareTo comp) {
383  typedef btree_upper_bound_adapter<K,
384  btree_key_comparer<K, CompareTo, true> > upper_compare;
385  return n.linear_search_plain_compare(k, 0, n.count(), upper_compare(comp));
386  }
387 };
388 
389 // Dispatch helper class for using binary search with plain compare.
390 template <typename K, typename N, typename Compare>
391 struct btree_binary_search_plain_compare {
392  static int lower_bound(const K &k, const N &n, Compare comp) {
393  return n.binary_search_plain_compare(k, 0, n.count(), comp);
394  }
395  static int upper_bound(const K &k, const N &n, Compare comp) {
396  typedef btree_upper_bound_adapter<K, Compare> upper_compare;
397  return n.binary_search_plain_compare(k, 0, n.count(), upper_compare(comp));
398  }
399 };
400 
401 // Dispatch helper class for using binary search with compare-to.
402 template <typename K, typename N, typename CompareTo>
403 struct btree_binary_search_compare_to {
404  static int lower_bound(const K &k, const N &n, CompareTo comp) {
405  return n.binary_search_compare_to(k, 0, n.count(), CompareTo());
406  }
407  static int upper_bound(const K &k, const N &n, CompareTo comp) {
408  typedef btree_upper_bound_adapter<K,
409  btree_key_comparer<K, CompareTo, true> > upper_compare;
410  return n.linear_search_plain_compare(k, 0, n.count(), upper_compare(comp));
411  }
412 };
413 
414 // A node in the btree. The same node type is used for both internal
415 // and leaf nodes in the btree, though the nodes are allocated in such a way
416 // that the children array is only valid in internal nodes.
417 template <typename Params>
418 class btree_node {
419  public:
420  typedef Params params_type;
421  typedef btree_node<Params> self_type;
422  typedef typename Params::key_type key_type;
423  typedef typename Params::data_type data_type;
424  typedef typename Params::value_type value_type;
425  typedef typename Params::mutable_value_type mutable_value_type;
426  typedef typename Params::pointer pointer;
427  typedef typename Params::const_pointer const_pointer;
428  typedef typename Params::reference reference;
429  typedef typename Params::const_reference const_reference;
430  typedef typename Params::key_compare key_compare;
431  typedef typename Params::size_type size_type;
432  typedef typename Params::difference_type difference_type;
433  // Typedefs for the various types of node searches.
434  typedef btree_linear_search_plain_compare<
435  key_type, self_type, key_compare> linear_search_plain_compare_type;
436  typedef btree_linear_search_compare_to<
437  key_type, self_type, key_compare> linear_search_compare_to_type;
438  typedef btree_binary_search_plain_compare<
439  key_type, self_type, key_compare> binary_search_plain_compare_type;
440  typedef btree_binary_search_compare_to<
441  key_type, self_type, key_compare> binary_search_compare_to_type;
442  // If we have a valid key-compare-to type, use linear_search_compare_to,
443  // otherwise use linear_search_plain_compare.
444  typedef typename if_<
445  Params::is_key_compare_to::value,
446  linear_search_compare_to_type,
447  linear_search_plain_compare_type>::type linear_search_type;
448  // If we have a valid key-compare-to type, use binary_search_compare_to,
449  // otherwise use binary_search_plain_compare.
450  typedef typename if_<
451  Params::is_key_compare_to::value,
452  binary_search_compare_to_type,
453  binary_search_plain_compare_type>::type binary_search_type;
454  // If the key is an integral or floating point type, use linear search which
455  // is faster than binary search for such types. Might be wise to also
456  // configure linear search based on node-size.
457  typedef typename if_<
458  std::is_integral<key_type>::value ||
459  std::is_floating_point<key_type>::value,
460  linear_search_type, binary_search_type>::type search_type;
461 
462  struct base_fields {
463  typedef typename Params::node_count_type field_type;
464 
465  // A boolean indicating whether the node is a leaf or not.
466  bool leaf;
467  // The position of the node in the node's parent.
468  field_type position;
469  // The maximum number of values the node can hold.
470  field_type max_count;
471  // The count of the number of values in the node.
472  field_type count;
473  // A pointer to the node's parent.
474  btree_node *parent;
475  };
476 
477  enum {
478  kValueSize = params_type::kValueSize,
479  kTargetNodeSize = params_type::kTargetNodeSize,
480 
481  // Compute how many values we can fit onto a leaf node.
482  kNodeTargetValues = (kTargetNodeSize - sizeof(base_fields)) / kValueSize,
483  // We need a minimum of 3 values per internal node in order to perform
484  // splitting (1 value for the two nodes involved in the split and 1 value
485  // propagated to the parent as the delimiter for the split).
486  kNodeValues = kNodeTargetValues >= 3 ? kNodeTargetValues : 3,
487 
488  kExactMatch = 1 << 30,
489  kMatchMask = kExactMatch - 1,
490  };
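  // Worked example (illustrative; 64-bit build, btree_set<int32_t>,
  // kTargetNodeSize = 256): base_fields is typically 16 bytes (a bool, three
  // one-byte counters, padding, and the parent pointer), so
  // kNodeTargetValues = (256 - 16) / 4 = 60 and each leaf holds up to 60 keys.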
491 
492  struct leaf_fields : public base_fields {
493  // The array of values. Only the first count of these values have been
494  // constructed and are valid.
495  mutable_value_type values[kNodeValues];
496  };
497 
498  struct internal_fields : public leaf_fields {
499 // The array of child pointers. The keys in children[i] are all less than
500 // key(i). The keys in children[i + 1] are all greater than key(i). There
501  // are always count + 1 children.
502  btree_node *children[kNodeValues + 1];
503  };
504 
505  struct root_fields : public internal_fields {
506  btree_node *rightmost;
507  size_type size;
508  };
509 
510  public:
511  // Getter/setter for whether this is a leaf node or not. This value doesn't
512  // change after the node is created.
513  bool leaf() const { return fields_.leaf; }
514 
515  // Getter for the position of this node in its parent.
516  int position() const { return fields_.position; }
517  void set_position(int v) { fields_.position = (typename base_fields::field_type) v; }
518 
519  // Getter/setter for the number of values stored in this node.
520  int count() const { return fields_.count; }
521  void set_count(int v) { fields_.count = (typename base_fields::field_type) v; }
522  int max_count() const { return fields_.max_count; }
523 
524  // Getter for the parent of this node.
525  btree_node* parent() const { return fields_.parent; }
526  // Getter for whether the node is the root of the tree. The parent of the
527  // root of the tree is the leftmost node in the tree which is guaranteed to
528  // be a leaf.
529  bool is_root() const { return parent()->leaf(); }
530  void make_root() {
531  assert(parent()->is_root());
532  fields_.parent = fields_.parent->parent();
533  }
534 
535  // Getter for the rightmost root node field. Only valid on the root node.
536  btree_node* rightmost() const { return fields_.rightmost; }
537  btree_node** mutable_rightmost() { return &fields_.rightmost; }
538 
539  // Getter for the size root node field. Only valid on the root node.
540  size_type size() const { return fields_.size; }
541  size_type* mutable_size() { return &fields_.size; }
542 
543  // Getters for the key/value at position i in the node.
544  const key_type& key(int i) const {
545  return params_type::key(fields_.values[i]);
546  }
547  reference value(int i) {
548  return reinterpret_cast<reference>(fields_.values[i]);
549  }
550  const_reference value(int i) const {
551  return reinterpret_cast<const_reference>(fields_.values[i]);
552  }
553  mutable_value_type* mutable_value(int i) {
554  return &fields_.values[i];
555  }
556 
557  // Swap value i in this node with value j in node x.
558  void value_swap(int i, btree_node *x, int j) {
559  params_type::swap(mutable_value(i), x->mutable_value(j));
560  }
561 
562  // Getters/setter for the child at position i in the node.
563  btree_node* child(int i) const { return fields_.children[i]; }
564  btree_node** mutable_child(int i) { return &fields_.children[i]; }
565  void set_child(int i, btree_node *c) {
566  *mutable_child(i) = c;
567  c->fields_.parent = this;
568  c->fields_.position = (typename base_fields::field_type) i;
569  }
570 
571  // Returns the position of the first value whose key is not less than k.
572  template <typename Compare>
573  int lower_bound(const key_type &k, const Compare &comp) const {
574  return search_type::lower_bound(k, *this, comp);
575  }
576  // Returns the position of the first value whose key is greater than k.
577  template <typename Compare>
578  int upper_bound(const key_type &k, const Compare &comp) const {
579  return search_type::upper_bound(k, *this, comp);
580  }
581 
582  // Returns the position of the first value whose key is not less than k using
583  // linear search performed using plain compare.
584  template <typename Compare>
585  int linear_search_plain_compare(
586  const key_type &k, int s, int e, const Compare &comp) const {
587  while (s < e) {
588  if (!btree_compare_keys(comp, key(s), k)) {
589  break;
590  }
591  ++s;
592  }
593  return s;
594  }
595 
596  // Returns the position of the first value whose key is not less than k using
597  // linear search performed using compare-to.
598  template <typename Compare>
599  int linear_search_compare_to(
600  const key_type &k, int s, int e, const Compare &comp) const {
601  while (s < e) {
602  int c = comp(key(s), k);
603  if (c == 0) {
604  return s | kExactMatch;
605  } else if (c > 0) {
606  break;
607  }
608  ++s;
609  }
610  return s;
611  }
612 
613  // Returns the position of the first value whose key is not less than k using
614  // binary search performed using plain compare.
615  template <typename Compare>
616  int binary_search_plain_compare(
617  const key_type &k, int s, int e, const Compare &comp) const {
618  while (s != e) {
619  int mid = (s + e) / 2;
620  if (btree_compare_keys(comp, key(mid), k)) {
621  s = mid + 1;
622  } else {
623  e = mid;
624  }
625  }
626  return s;
627  }
628 
629  // Returns the position of the first value whose key is not less than k using
630  // binary search performed using compare-to.
631  template <typename CompareTo>
632  int binary_search_compare_to(
633  const key_type &k, int s, int e, const CompareTo &comp) const {
634  while (s != e) {
635  int mid = (s + e) / 2;
636  int c = comp(key(mid), k);
637  if (c < 0) {
638  s = mid + 1;
639  } else if (c > 0) {
640  e = mid;
641  } else {
642  // Need to return the first value whose key is not less than k, which
643  // requires continuing the binary search. Note that we are guaranteed
644  // that the result is an exact match because if "key(mid-1) < k" the
645  // call to binary_search_compare_to() will return "mid".
646  s = binary_search_compare_to(k, s, mid, comp);
647  return s | kExactMatch;
648  }
649  }
650  return s;
651  }
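  // For example (illustrative; node, k and comp are placeholders): callers of
  // the compare-to searches unpack the result with the masks above:
  //
  //   int res  = node->binary_search_compare_to(k, 0, node->count(), comp);
  //   int pos  = res & kMatchMask;            // lower-bound position
  //   bool hit = (res & kExactMatch) != 0;    // true when key(pos) == k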
652 
653  // Inserts the value x at position i, shifting all existing values and
654  // children at positions >= i to the right by 1.
655  void insert_value(int i, const value_type &x);
656 
657  // Removes the value at position i, shifting all existing values and children
658  // at positions > i to the left by 1.
659  void remove_value(int i);
660 
661  // Rebalances a node with its right sibling.
662  void rebalance_right_to_left(btree_node *sibling, int to_move);
663  void rebalance_left_to_right(btree_node *sibling, int to_move);
664 
665  // Splits a node, moving a portion of the node's values to its right sibling.
666  void split(btree_node *sibling, int insert_position);
667 
668  // Merges a node with its right sibling, moving all of the values and the
669  // delimiting key in the parent node onto itself.
670  void merge(btree_node *sibling);
671 
672  // Swap the contents of "this" and "src".
673  void swap(btree_node *src);
674 
675  // Node allocation/deletion routines.
676  static btree_node* init_leaf(
677  leaf_fields *f, btree_node *parent, int max_count) {
678  btree_node *n = reinterpret_cast<btree_node*>(f);
679  f->leaf = 1;
680  f->position = 0;
681  f->max_count = (typename leaf_fields::field_type) max_count;
682  f->count = 0;
683  f->parent = parent;
684 #if defined REMOVED_BY_BENTLEY
685  if (!NDEBUG) {
686  memset(&f->values, 0, max_count * sizeof(value_type));
687  }
688 #endif
689  return n;
690  }
691  static btree_node* init_internal(internal_fields *f, btree_node *parent) {
692  btree_node *n = init_leaf(f, parent, kNodeValues);
693  f->leaf = 0;
694 #if defined REMOVED_BY_BENTLEY
695  if (!NDEBUG) {
696  memset(f->children, 0, sizeof(f->children));
697  }
698 #endif
699  return n;
700  }
701  static btree_node* init_root(root_fields *f, btree_node *parent) {
702  btree_node *n = init_internal(f, parent);
703  f->rightmost = parent;
704  f->size = parent->count();
705  return n;
706  }
707  void destroy() {
708  for (int i = 0; i < count(); ++i) {
709  value_destroy(i);
710  }
711  }
712 
713  private:
714  void value_init(int i) {
715  new (&fields_.values[i]) mutable_value_type;
716  }
717  void value_init(int i, const value_type &x) {
718  new (&fields_.values[i]) mutable_value_type(x);
719  }
720  void value_destroy(int i) {
721  fields_.values[i].~mutable_value_type();
722  }
723 
724  private:
725  root_fields fields_;
726 
727  private:
728  btree_node(const btree_node&);
729  void operator=(const btree_node&);
730 };
731 
732 template <typename Node, typename Reference, typename Pointer>
733 struct btree_iterator {
734  typedef typename Node::key_type key_type;
735  typedef typename Node::size_type size_type;
736  typedef typename Node::difference_type difference_type;
737  typedef typename Node::params_type params_type;
738 
739  typedef Node node_type;
740  typedef typename std::remove_const<Node>::type normal_node;
741  typedef const Node const_node;
742  typedef typename params_type::value_type value_type;
743  typedef typename params_type::pointer normal_pointer;
744  typedef typename params_type::reference normal_reference;
745  typedef typename params_type::const_pointer const_pointer;
746  typedef typename params_type::const_reference const_reference;
747 
748  typedef Pointer pointer;
749  typedef Reference reference;
750  typedef std::bidirectional_iterator_tag iterator_category;
751 
752  typedef btree_iterator<
753  normal_node, normal_reference, normal_pointer> iterator;
754  typedef btree_iterator<
755  const_node, const_reference, const_pointer> const_iterator;
756  typedef btree_iterator<Node, Reference, Pointer> self_type;
757 
758  btree_iterator()
759  : node(NULL),
760  position(-1) {
761  }
762  btree_iterator(Node *n, int p)
763  : node(n),
764  position(p) {
765  }
766  btree_iterator(const iterator &x)
767  : node(x.node),
768  position(x.position) {
769  }
770 
771  // Increment/decrement the iterator.
772  void increment() {
773  if (node->leaf() && ++position < node->count()) {
774  return;
775  }
776  increment_slow();
777  }
778  void increment_by(int count);
779  void increment_slow();
780 
781  void decrement() {
782  if (node->leaf() && --position >= 0) {
783  return;
784  }
785  decrement_slow();
786  }
787  void decrement_slow();
788 
789  bool operator==(const const_iterator &x) const {
790  return node == x.node && position == x.position;
791  }
792  bool operator!=(const const_iterator &x) const {
793  return node != x.node || position != x.position;
794  }
795 
796  // Accessors for the key/value the iterator is pointing at.
797  const key_type& key() const {
798  return node->key(position);
799  }
800  reference operator*() const {
801  return node->value(position);
802  }
803  pointer operator->() const {
804  return &node->value(position);
805  }
806 
807  self_type& operator++() {
808  increment();
809  return *this;
810  }
811  self_type& operator--() {
812  decrement();
813  return *this;
814  }
815  self_type operator++(int) {
816  self_type tmp = *this;
817  ++*this;
818  return tmp;
819  }
820  self_type operator--(int) {
821  self_type tmp = *this;
822  --*this;
823  return tmp;
824  }
825 
826  // The node in the tree the iterator is pointing at.
827  Node *node;
828  // The position within the node of the tree the iterator is pointing at.
829  int position;
830 };
831 
832 // Dispatch helper class for using btree::internal_locate with plain compare.
833 struct btree_internal_locate_plain_compare {
834  template <typename K, typename T, typename Iter>
835  static bpair<Iter, int> dispatch(const K &k, const T &t, Iter iter) {
836  return t.internal_locate_plain_compare(k, iter);
837  }
838 };
839 
840 // Dispatch helper class for using btree::internal_locate with compare-to.
841 struct btree_internal_locate_compare_to {
842  template <typename K, typename T, typename Iter>
843  static bpair<Iter, int> dispatch(const K &k, const T &t, Iter iter) {
844  return t.internal_locate_compare_to(k, iter);
845  }
846 };
847 
848 template <typename Params>
849 class btree : public Params::key_compare {
850  typedef btree<Params> self_type;
851  typedef btree_node<Params> node_type;
852  typedef typename node_type::base_fields base_fields;
853  typedef typename node_type::leaf_fields leaf_fields;
854  typedef typename node_type::internal_fields internal_fields;
855  typedef typename node_type::root_fields root_fields;
856  typedef typename Params::is_key_compare_to is_key_compare_to;
857 
858  friend struct btree_internal_locate_plain_compare;
859  friend struct btree_internal_locate_compare_to;
860  typedef typename if_<
861  is_key_compare_to::value,
862  btree_internal_locate_compare_to,
863  btree_internal_locate_plain_compare>::type internal_locate_type;
864 
865  enum {
866  kNodeValues = node_type::kNodeValues,
867  kMinNodeValues = kNodeValues / 2,
868  kValueSize = node_type::kValueSize,
869  kExactMatch = node_type::kExactMatch,
870  kMatchMask = node_type::kMatchMask,
871  };
872 
873  // A helper class to get the empty base class optimization for 0-size
874  // allocators. Base is internal_allocator_type.
875  // (e.g. empty_base_handle<internal_allocator_type, node_type*>). If Base is
876  // 0-size, the compiler doesn't have to reserve any space for it and
877  // sizeof(empty_base_handle) will simply be sizeof(Data). Google [empty base
878  // class optimization] for more details.
879  template <typename Base, typename Data>
880  struct empty_base_handle : public Base {
881  empty_base_handle(const Base &b, const Data &d)
882  : Base(b),
883  data(d) {
884  }
885  Data data;
886  };
887 
888  struct node_stats {
889  node_stats(ssize_t l, ssize_t i)
890  : leaf_nodes(l),
891  internal_nodes(i) {
892  }
893 
894  node_stats& operator+=(const node_stats &x) {
895  leaf_nodes += x.leaf_nodes;
896  internal_nodes += x.internal_nodes;
897  return *this;
898  }
899 
900  ssize_t leaf_nodes;
901  ssize_t internal_nodes;
902  };
903 
904  public:
905  typedef Params params_type;
906  typedef typename Params::key_type key_type;
907  typedef typename Params::data_type data_type;
908  typedef typename Params::mapped_type mapped_type;
909  typedef typename Params::value_type value_type;
910  typedef typename Params::key_compare key_compare;
911  typedef typename Params::pointer pointer;
912  typedef typename Params::const_pointer const_pointer;
913  typedef typename Params::reference reference;
914  typedef typename Params::const_reference const_reference;
915  typedef typename Params::size_type size_type;
916  typedef typename Params::difference_type difference_type;
917  typedef btree_iterator<node_type, reference, pointer> iterator;
918  typedef typename iterator::const_iterator const_iterator;
919  typedef std::reverse_iterator<const_iterator> const_reverse_iterator;
920  typedef std::reverse_iterator<iterator> reverse_iterator;
921 
922  typedef typename Params::allocator_type allocator_type;
923  typedef typename allocator_type::template rebind<char>::other
924  internal_allocator_type;
925 
926  public:
927  // Default constructor.
928  btree(const key_compare &comp, const allocator_type &alloc);
929 
930  // Copy constructor.
931  btree(const self_type &x);
932 
933  // Destructor.
934  ~btree() {
935  clear();
936  }
937 
938  // Iterator routines.
939  iterator begin() {
940  return iterator(leftmost(), 0);
941  }
942  const_iterator begin() const {
943  return const_iterator(leftmost(), 0);
944  }
945  iterator end() {
946  return iterator(rightmost(), rightmost() ? rightmost()->count() : 0);
947  }
948  const_iterator end() const {
949  return const_iterator(rightmost(), rightmost() ? rightmost()->count() : 0);
950  }
951  reverse_iterator rbegin() {
952  return reverse_iterator(end());
953  }
954  const_reverse_iterator rbegin() const {
955  return const_reverse_iterator(end());
956  }
957  reverse_iterator rend() {
958  return reverse_iterator(begin());
959  }
960  const_reverse_iterator rend() const {
961  return const_reverse_iterator(begin());
962  }
963 
964  // Finds the first element whose key is not less than key.
965  iterator lower_bound(const key_type &key) {
966  return internal_end(
967  internal_lower_bound(key, iterator(root(), 0)));
968  }
969  const_iterator lower_bound(const key_type &key) const {
970  return internal_end(
971  internal_lower_bound(key, const_iterator(root(), 0)));
972  }
973 
974  // Finds the first element whose key is greater than key.
975  iterator upper_bound(const key_type &key) {
976  return internal_end(
977  internal_upper_bound(key, iterator(root(), 0)));
978  }
979  const_iterator upper_bound(const key_type &key) const {
980  return internal_end(
981  internal_upper_bound(key, const_iterator(root(), 0)));
982  }
983 
984  // Finds the range of values which compare equal to key. The first member of
985 // the returned pair is equal to lower_bound(key). The second member of the
986 // pair is equal to upper_bound(key).
987  bpair<iterator,iterator> equal_range(const key_type &key) {
988  return make_bpair(lower_bound(key), upper_bound(key));
989  }
990  bpair<const_iterator,const_iterator> equal_range(const key_type &key) const {
991  return make_bpair(lower_bound(key), upper_bound(key));
992  }
993 
994  // Inserts a value into the btree only if it does not already exist. The
995  // boolean return value indicates whether insertion succeeded or failed. The
996 // ValuePointer type is used to avoid instantiating the value unless the key
997  // is being inserted. Value is not dereferenced if the key already exists in
998  // the btree. See bmap::operator[].
999  template <typename ValuePointer>
1000  bpair<iterator,bool> insert_unique(const key_type &key, ValuePointer value);
1001 
1002  // Inserts a value into the btree only if it does not already exist. The
1003  // boolean return value indicates whether insertion succeeded or failed.
1004  bpair<iterator,bool> insert_unique(const value_type &v) {
1005  return insert_unique(params_type::key(v), &v);
1006  }
1007 
1008  // Insert with hint. Check to see if the value should be placed immediately
1009 // before position in the tree. If so, then the insertion will take
1010  // amortized constant time. If not, the insertion will take amortized
1011  // logarithmic time as if a call to insert_unique(v) were made.
1012  iterator insert_unique(iterator position, const value_type &v);
1013 
1014  // Insert a range of values into the btree.
1015  template <typename InputIterator>
1016  void insert_unique(InputIterator b, InputIterator e);
1017 
1018  // Inserts a value into the btree. The ValuePointer type is used to avoid
1019 // instantiating the value unless the key is being inserted. Value is not
1020  // dereferenced if the key already exists in the btree. See
1021  // bmap::operator[].
1022  template <typename ValuePointer>
1023  iterator insert_multi(const key_type &key, ValuePointer value);
1024 
1025  // Inserts a value into the btree.
1026  iterator insert_multi(const value_type &v) {
1027  return insert_multi(params_type::key(v), &v);
1028  }
1029 
1030  // Insert with hint. Check to see if the value should be placed immediately
1031 // before position in the tree. If so, then the insertion will take
1032  // amortized constant time. If not, the insertion will take amortized
1033  // logarithmic time as if a call to insert_multi(v) were made.
1034  iterator insert_multi(iterator position, const value_type &v);
1035 
1036  // Insert a range of values into the btree.
1037  template <typename InputIterator>
1038  void insert_multi(InputIterator b, InputIterator e);
1039 
1040  void assign(const self_type &x);
1041 
1042  // Erase the specified iterator from the btree. The iterator must be valid
1043  // (i.e. not equal to end()). Return an iterator pointing to the node after
1044  // the one that was erased (or end() if none exists).
1045  iterator erase(iterator iter);
1046 
1047  // Erases range. Returns the number of keys erased.
1048  int erase(iterator begin, iterator end);
1049 
1050  // Erases the specified key from the btree. Returns 1 if an element was
1051  // erased and 0 otherwise.
1052  int erase_unique(const key_type &key);
1053 
1054  // Erases all of the entries matching the specified key from the
1055  // btree. Returns the number of elements erased.
1056  int erase_multi(const key_type &key);
1057 
1058  // Finds the iterator corresponding to a key or returns end() if the key is
1059  // not present.
1060  iterator find_unique(const key_type &key) {
1061  return internal_end(
1062  internal_find_unique(key, iterator(root(), 0)));
1063  }
1064  const_iterator find_unique(const key_type &key) const {
1065  return internal_end(
1066  internal_find_unique(key, const_iterator(root(), 0)));
1067  }
1068  iterator find_multi(const key_type &key) {
1069  return internal_end(
1070  internal_find_multi(key, iterator(root(), 0)));
1071  }
1072  const_iterator find_multi(const key_type &key) const {
1073  return internal_end(
1074  internal_find_multi(key, const_iterator(root(), 0)));
1075  }
1076 
1077  // Returns a count of the number of times the key appears in the btree.
1078  size_type count_unique(const key_type &key) const {
1079  const_iterator begin = internal_find_unique(
1080  key, const_iterator(root(), 0));
1081  if (!begin.node) {
1082  // The key doesn't exist in the tree.
1083  return 0;
1084  }
1085  return 1;
1086  }
1087  // Returns a count of the number of times the key appears in the btree.
1088  size_type count_multi(const key_type &key) const {
1089  return distance(lower_bound(key), upper_bound(key));
1090  }
1091 
1092  // Clear the btree, deleting all of the values it contains.
1093  void clear();
1094 
1095  // Swap the contents of *this and x.
1096  void swap(self_type &x);
1097 
1098  // Assign the contents of x to *this.
1099  self_type& operator=(const self_type &x) {
1100  if (&x == this) {
1101  // Don't copy onto ourselves.
1102  return *this;
1103  }
1104  assign(x);
1105  return *this;
1106  }
1107 
1108  key_compare* mutable_key_comp() {
1109  return this;
1110  }
1111  const key_compare& key_comp() const {
1112  return *this;
1113  }
1114  bool compare_keys(const key_type &x, const key_type &y) const {
1115  return btree_compare_keys(key_comp(), x, y);
1116  }
1117 
1118 #if defined COMPILE_BTREE_DUMP
1119  // Dump the btree to the specified ostream. Requires that operator<< is
1120  // defined for Key and Value.
1121  void dump(std::ostream &os) const {
1122  if (root() != NULL) {
1123  internal_dump(os, root(), 0);
1124  }
1125  }
1126 #endif
1127 
1128  // Verifies the structure of the btree.
1129  void verify() const;
1130 
1131  // Size routines. Note that empty() is slightly faster than doing size()==0.
1132  size_type size() const {
1133  if (empty()) return 0;
1134  if (root()->leaf()) return root()->count();
1135  return root()->size();
1136  }
1137  size_type max_size() const { return std::numeric_limits<size_type>::max(); }
1138  bool empty() const { return root() == NULL; }
1139 
1140  // The height of the btree. An empty tree will have height 0.
1141  size_type height() const {
1142  size_type h = 0;
1143  if (root()) {
1144  // Count the length of the chain from the leftmost node up to the
1145  // root. We actually count from the root back around to the level below
1146  // the root, but the calculation is the same because of the circularity
1147  // of that traversal.
1148  const node_type *n = root();
1149  do {
1150  ++h;
1151  n = n->parent();
1152  } while (n != root());
1153  }
1154  return h;
1155  }
1156 
1157  // The number of internal, leaf and total nodes used by the btree.
1158  size_type leaf_nodes() const {
1159  return internal_stats(root()).leaf_nodes;
1160  }
1161  size_type internal_nodes() const {
1162  return internal_stats(root()).internal_nodes;
1163  }
1164  size_type nodes() const {
1165  node_stats stats = internal_stats(root());
1166  return stats.leaf_nodes + stats.internal_nodes;
1167  }
1168 
1169  // The total number of bytes used by the btree.
1170  size_type bytes_used() const {
1171  node_stats stats = internal_stats(root());
1172  if (stats.leaf_nodes == 1 && stats.internal_nodes == 0) {
1173  return sizeof(*this) +
1174  sizeof(base_fields) + root()->max_count() * sizeof(value_type);
1175  } else {
1176  return sizeof(*this) +
1177  sizeof(root_fields) - sizeof(internal_fields) +
1178  stats.leaf_nodes * sizeof(leaf_fields) +
1179  stats.internal_nodes * sizeof(internal_fields);
1180  }
1181  }
1182 
1183  // The average number of bytes used per value stored in the btree.
1184  static double average_bytes_per_value() {
1185  // Returns the number of bytes per value on a leaf node that is 75%
1186  // full. Experimentally, this matches up nicely with the computed number of
1187  // bytes per value in trees that had their values inserted in random order.
1188  return sizeof(leaf_fields) / (kNodeValues * 0.75);
1189  }
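  // Worked example (illustrative; btree_set<int32_t>, 256-byte nodes, 64-bit
  // build): sizeof(leaf_fields) is about 16 + 60 * 4 = 256 bytes and
  // kNodeValues is 60, so average_bytes_per_value() is roughly
  // 256 / (60 * 0.75), about 5.7 bytes per value.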
1190 
1191  // The fullness of the btree. Computed as the number of elements in the btree
1192  // divided by the maximum number of elements a tree with the current number
1193  // of nodes could hold. A value of 1 indicates perfect space
1194  // utilization. Smaller values indicate space wastage.
1195  double fullness() const {
1196  return double(size()) / (nodes() * kNodeValues);
1197  }
1198  // The overhead of the btree structure in bytes per node. Computed as the
1199  // total number of bytes used by the btree minus the number of bytes used for
1200  // storing elements divided by the number of elements.
1201  double overhead() const {
1202  if (empty()) {
1203  return 0.0;
1204  }
1205  return (bytes_used() - size() * kValueSize) / double(size());
1206  }
1207 
1208  private:
1209  // Internal accessor routines.
1210  node_type* root() { return root_.data; }
1211  const node_type* root() const { return root_.data; }
1212  node_type** mutable_root() { return &root_.data; }
1213 
1214  // The rightmost node is stored in the root node.
1215  node_type* rightmost() {
1216  return (!root() || root()->leaf()) ? root() : root()->rightmost();
1217  }
1218  const node_type* rightmost() const {
1219  return (!root() || root()->leaf()) ? root() : root()->rightmost();
1220  }
1221  node_type** mutable_rightmost() { return root()->mutable_rightmost(); }
1222 
1223  // The leftmost node is stored as the parent of the root node.
1224  node_type* leftmost() { return root() ? root()->parent() : NULL; }
1225  const node_type* leftmost() const { return root() ? root()->parent() : NULL; }
1226 
1227  // The size of the tree is stored in the root node.
1228  size_type* mutable_size() { return root()->mutable_size(); }
1229 
1230  // Allocator routines.
1231  internal_allocator_type* mutable_internal_allocator() {
1232  return static_cast<internal_allocator_type*>(&root_);
1233  }
1234  const internal_allocator_type& internal_allocator() const {
1235  return *static_cast<const internal_allocator_type*>(&root_);
1236  }
1237 
1238  // Node creation/deletion routines.
1239  node_type* new_internal_node(node_type *parent) {
1240  internal_fields *p = reinterpret_cast<internal_fields*>(
1241  mutable_internal_allocator()->allocate(sizeof(internal_fields)));
1242  return node_type::init_internal(p, parent);
1243  }
1244  node_type* new_internal_root_node() {
1245  root_fields *p = reinterpret_cast<root_fields*>(
1246  mutable_internal_allocator()->allocate(sizeof(root_fields)));
1247  return node_type::init_root(p, root()->parent());
1248  }
1249  node_type* new_leaf_node(node_type *parent) {
1250  leaf_fields *p = reinterpret_cast<leaf_fields*>(
1251  mutable_internal_allocator()->allocate(sizeof(leaf_fields)));
1252  return node_type::init_leaf(p, parent, kNodeValues);
1253  }
1254  node_type* new_leaf_root_node(int max_count) {
1255  leaf_fields *p = reinterpret_cast<leaf_fields*>(
1256  mutable_internal_allocator()->allocate(
1257  sizeof(base_fields) + max_count * sizeof(value_type)));
1258  return node_type::init_leaf(p, reinterpret_cast<node_type*>(p), max_count);
1259  }
1260  void delete_internal_node(node_type *node) {
1261  node->destroy();
1262  assert(node != root());
1263  mutable_internal_allocator()->deallocate(
1264  reinterpret_cast<char*>(node), sizeof(internal_fields));
1265  }
1266  void delete_internal_root_node() {
1267  root()->destroy();
1268  mutable_internal_allocator()->deallocate(
1269  reinterpret_cast<char*>(root()), sizeof(root_fields));
1270  }
1271  void delete_leaf_node(node_type *node) {
1272  node->destroy();
1273  mutable_internal_allocator()->deallocate(
1274  reinterpret_cast<char*>(node),
1275  sizeof(base_fields) + node->max_count() * sizeof(value_type));
1276  }
1277 
1278  // Rebalances or splits the node iter points to.
1279  void rebalance_or_split(iterator *iter);
1280 
1281  // Merges the values of left, right and the delimiting key on their parent
1282  // onto left, removing the delimiting key and deleting right.
1283  void merge_nodes(node_type *left, node_type *right);
1284 
1285  // Tries to merge node with its left or right sibling, and failing that,
1286  // rebalance with its left or right sibling. Returns true if a merge
1287  // occurred, at which point it is no longer valid to access node. Returns
1288  // false if no merging took place.
1289  bool try_merge_or_rebalance(iterator *iter);
1290 
1291  // Tries to shrink the height of the tree by 1.
1292  void try_shrink();
1293 
1294  iterator internal_end(iterator iter) {
1295  return iter.node ? iter : end();
1296  }
1297  const_iterator internal_end(const_iterator iter) const {
1298  return iter.node ? iter : end();
1299  }
1300 
1301  // Inserts a value into the btree immediately before iter. Requires that
1302  // key(v) <= iter.key() and (--iter).key() <= key(v).
1303  iterator internal_insert(iterator iter, const value_type &v);
1304 
1305  // Returns an iterator pointing to the first value >= the value "iter" is
1306  // pointing at. Note that "iter" might be pointing to an invalid location as
1307  // iter.position == iter.node->count(). This routine simply moves iter up in
1308  // the tree to a valid location.
1309  template <typename IterType>
1310  static IterType internal_last(IterType iter);
1311 
1312  // Returns an iterator pointing to the leaf position at which key would
1313  // reside in the tree. We provide 2 versions of internal_locate. The first
1314  // version (internal_locate_plain_compare) always returns 0 for the second
1315  // field of the pair. The second version (internal_locate_compare_to) is for
1316  // the key-compare-to specialization and returns either kExactMatch (if the
1317  // key was found in the tree) or -kExactMatch (if it wasn't) in the second
1318  // field of the pair. The compare_to specialization allows the caller to
1319  // avoid a subsequent comparison to determine if an exact match was made,
1320  // speeding up string keys.
1321  template <typename IterType>
1322  bpair<IterType, int> internal_locate(
1323  const key_type &key, IterType iter) const;
1324  template <typename IterType>
1325  bpair<IterType, int> internal_locate_plain_compare(
1326  const key_type &key, IterType iter) const;
1327  template <typename IterType>
1328  bpair<IterType, int> internal_locate_compare_to(
1329  const key_type &key, IterType iter) const;
1330 
1331  // Internal routine which implements lower_bound().
1332  template <typename IterType>
1333  IterType internal_lower_bound(
1334  const key_type &key, IterType iter) const;
1335 
1336  // Internal routine which implements upper_bound().
1337  template <typename IterType>
1338  IterType internal_upper_bound(
1339  const key_type &key, IterType iter) const;
1340 
1341  // Internal routine which implements find_unique().
1342  template <typename IterType>
1343  IterType internal_find_unique(
1344  const key_type &key, IterType iter) const;
1345 
1346  // Internal routine which implements find_multi().
1347  template <typename IterType>
1348  IterType internal_find_multi(
1349  const key_type &key, IterType iter) const;
1350 
1351  // Deletes a node and all of its children.
1352  void internal_clear(node_type *node);
1353 
1354 #if defined COMPILE_BTREE_DUMP
1355  // Dumps a node and all of its children to the specified ostream.
1356  void internal_dump(std::ostream &os, const node_type *node, int level) const;
1357 #endif
1358 
1359  // Verifies the tree structure of node.
1360  int internal_verify(const node_type *node,
1361  const key_type *lo, const key_type *hi) const;
1362 
1363  node_stats internal_stats(const node_type *node) const {
1364  if (!node) {
1365  return node_stats(0, 0);
1366  }
1367  if (node->leaf()) {
1368  return node_stats(1, 0);
1369  }
1370  node_stats res(0, 1);
1371  for (int i = 0; i <= node->count(); ++i) {
1372  res += internal_stats(node->child(i));
1373  }
1374  return res;
1375  }
1376 
1377  private:
1378  empty_base_handle<internal_allocator_type, node_type*> root_;
1379 
1380  private:
1381 // A never-instantiated helper function that returns big_ when R is int (for a
1382 // key-compare-to functor) or bool (otherwise), and small_ when it is not.
1383  template <typename R>
1384  static typename if_<
1385  if_<is_key_compare_to::value,
1386  std::is_same<R, int>,
1387  std::is_same<R, bool> >::type::value,
1388  big_, small_>::type key_compare_checker(R);
1389 
1390  // A never instantiated helper function that returns the key comparison
1391  // functor.
1392  static key_compare key_compare_helper();
1393 
1394  // Verify that key_compare returns a bool. This is similar to the way
1395  // is_convertible in base/type_traits.h works. Note that key_compare_checker
1396  // is never actually invoked. The compiler will select which
1397  // key_compare_checker() to instantiate and then figure out the size of the
1398  // return type of key_compare_checker() at compile time which we then check
1399  // against the sizeof of big_.
1400 #if REMOVED_DOESNT_COMPILE // BENTLEY
1401  static_assert(sizeof(key_compare_checker(key_compare_helper()(key_type(), key_type()))) == sizeof(big_), "key_comparison_function_must_return_bool");
1402 #endif
1403 
1404 // Note: kNodeValues, which is computed from Params::kTargetNodeSize, must
1405 // fit in base_fields::field_type.
1406  static_assert(kNodeValues <
1407  (1 << (8 * sizeof(typename base_fields::field_type))),
1408  "target_node_size_too_large");
1409 
1410  // Test the assumption made in setting kNodeValueSpace.
1411  static_assert(sizeof(base_fields) >= 2 * sizeof(void*),
1412  "node_space_assumption_incorrect");
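  // Illustrative note: because sizeof(base_fields) >= 2 * sizeof(void*),
  // kNodeTargetValues <= kNodeValueSpace / ValueSize, so whenever a node can
  // hold 256 or more values the wider uint16_t node_count_type is selected
  // automatically by btree_common_params.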
1413 };
1414 
1416 // btree_node methods
1417 template <typename P>
1418 inline void btree_node<P>::insert_value(int i, const value_type &x) {
1419  assert(i <= count());
1420  value_init(count(), x);
1421  for (int j = count(); j > i; --j) {
1422  value_swap(j, this, j - 1);
1423  }
1424  set_count(count() + 1);
1425 
1426  if (!leaf()) {
1427  ++i;
1428  for (int j = count(); j > i; --j) {
1429  *mutable_child(j) = child(j - 1);
1430  child(j)->set_position(j);
1431  }
1432  *mutable_child(i) = NULL;
1433  }
1434 }
1435 
1436 template <typename P>
1437 inline void btree_node<P>::remove_value(int i) {
1438  if (!leaf()) {
1439  assert(child(i + 1)->count() == 0);
1440  for (int j = i + 1; j < count(); ++j) {
1441  *mutable_child(j) = child(j + 1);
1442  child(j)->set_position(j);
1443  }
1444  *mutable_child(count()) = NULL;
1445  }
1446 
1447  set_count(count() - 1);
1448  for (; i < count(); ++i) {
1449  value_swap(i, this, i + 1);
1450  }
1451  value_destroy(i);
1452 }
1453 
1454 template <typename P>
1455 void btree_node<P>::rebalance_right_to_left(btree_node *src, int to_move) {
1456  assert(parent() == src->parent());
1457  assert(position() + 1 == src->position());
1458  assert(src->count() >= count());
1459  assert(to_move >= 1);
1460  assert(to_move <= src->count());
1461 
1462  // Make room in the left node for the new values.
1463  for (int i = 0; i < to_move; ++i) {
1464  value_init(i + count());
1465  }
1466 
1467  // Move the delimiting value to the left node and the new delimiting value
1468  // from the right node.
1469  value_swap(count(), parent(), position());
1470  parent()->value_swap(position(), src, to_move - 1);
1471 
1472  // Move the values from the right to the left node.
1473  for (int i = 1; i < to_move; ++i) {
1474  value_swap(count() + i, src, i - 1);
1475  }
1476  // Shift the values in the right node to their correct position.
1477  for (int i = to_move; i < src->count(); ++i) {
1478  src->value_swap(i - to_move, src, i);
1479  }
1480  for (int i = 1; i <= to_move; ++i) {
1481  src->value_destroy(src->count() - i);
1482  }
1483 
1484  if (!leaf()) {
1485  // Move the child pointers from the right to the left node.
1486  for (int i = 0; i < to_move; ++i) {
1487  set_child(1 + count() + i, src->child(i));
1488  }
1489  for (int i = 0; i <= src->count() - to_move; ++i) {
1490  assert(i + to_move <= src->max_count());
1491  src->set_child(i, src->child(i + to_move));
1492  *src->mutable_child(i + to_move) = NULL;
1493  }
1494  }
1495 
1496  // Fixup the counts on the src and dest nodes.
1497  set_count(count() + to_move);
1498  src->set_count(src->count() - to_move);
1499 }
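// Editorial worked example (values are illustrative, not from the original
// source): with left = [10 20], parent delimiter = 30, right = [40 50 60 70]
// and to_move = 2, the delimiter 30 moves down into the left node, 40 follows
// it over from the right node, and 50 is promoted to become the new delimiter:
//
//   left = [10 20 30 40]   parent delimiter = 50   right = [60 70]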
1500 
1501 template <typename P>
1502 void btree_node<P>::rebalance_left_to_right(btree_node *dest, int to_move) {
1503  assert(parent() == dest->parent());
1504  assert(position() + 1 == dest->position());
1505  assert(count() >= dest->count());
1506  assert(to_move >= 1);
1507  assert(to_move <= count());
1508 
1509  // Make room in the right node for the new values.
1510  for (int i = 0; i < to_move; ++i) {
1511  dest->value_init(i + dest->count());
1512  }
1513  for (int i = dest->count() - 1; i >= 0; --i) {
1514  dest->value_swap(i, dest, i + to_move);
1515  }
1516 
1517  // Move the delimiting value from the parent down to the right node, and
1518  // promote the new delimiting value from the left node up to the parent.
1519  dest->value_swap(to_move - 1, parent(), position());
1520  parent()->value_swap(position(), this, count() - to_move);
1521  value_destroy(count() - to_move);
1522 
1523  // Move the values from the left to the right node.
1524  for (int i = 1; i < to_move; ++i) {
1525  value_swap(count() - to_move + i, dest, i - 1);
1526  value_destroy(count() - to_move + i);
1527  }
1528 
1529  if (!leaf()) {
1530  // Move the child pointers from the left to the right node.
1531  for (int i = dest->count(); i >= 0; --i) {
1532  dest->set_child(i + to_move, dest->child(i));
1533  *dest->mutable_child(i) = NULL;
1534  }
1535  for (int i = 1; i <= to_move; ++i) {
1536  dest->set_child(i - 1, child(count() - to_move + i));
1537  *mutable_child(count() - to_move + i) = NULL;
1538  }
1539  }
1540 
1541  // Fixup the counts on the src and dest nodes.
1542  set_count(count() - to_move);
1543  dest->set_count(dest->count() + to_move);
1544 }
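// Editorial note: this is the exact mirror of rebalance_right_to_left. Running
// the example above in reverse, left = [10 20 30 40], parent delimiter = 50,
// right = [60 70] with to_move = 2 yields left = [10 20], delimiter = 30 and
// right = [40 50 60 70].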
1545 
1546 template <typename P>
1547 void btree_node<P>::split(btree_node *dest, int insert_position) {
1548  assert(dest->count() == 0);
1549 
1550  // We bias the split based on the position being inserted. If we're
1551  // inserting at the beginning of the left node then bias the split to put
1552  // more values on the right node. If we're inserting at the end of the
1553  // right node then bias the split to put more values on the left node.
1554  if (insert_position == 0) {
1555  dest->set_count(count() - 1);
1556  } else if (insert_position == max_count()) {
1557  dest->set_count(0);
1558  } else {
1559  dest->set_count(count() / 2);
1560  }
1561  set_count(count() - dest->count());
1562  assert(count() >= 1);
1563 
1564  // Move values from the left sibling to the right sibling.
1565  for (int i = 0; i < dest->count(); ++i) {
1566  dest->value_init(i);
1567  value_swap(count() + i, dest, i);
1568  value_destroy(count() + i);
1569  }
1570 
1571  // The split key is the largest value in the left sibling.
1572  set_count(count() - 1);
1573  parent()->insert_value(position(), value_type());
1574  value_swap(count(), parent(), position());
1575  value_destroy(count());
1576  parent()->set_child(position() + 1, dest);
1577 
1578  if (!leaf()) {
1579  for (int i = 0; i <= dest->count(); ++i) {
1580  assert(child(count() + i + 1) != NULL);
1581  dest->set_child(i, child(count() + i + 1));
1582  *mutable_child(count() + i + 1) = NULL;
1583  }
1584  }
1585 }
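// Editorial worked example (assuming a hypothetical capacity of 15 values per
// node): splitting a full node while inserting at position 0 leaves 0 values
// on the left after the split key is pushed up and 14 on the right, so the
// imminent insertion lands in the emptier node. Inserting at position 15 does
// the opposite (0 values move right), and any other position splits roughly
// in half (7 left after the push-up, 7 right).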
1586 
1587 template <typename P>
1588 void btree_node<P>::merge(btree_node *src) {
1589  assert(parent() == src->parent());
1590  assert(position() + 1 == src->position());
1591 
1592  // Move the delimiting value to the left node.
1593  value_init(count());
1594  value_swap(count(), parent(), position());
1595 
1596  // Move the values from the right to the left node.
1597  for (int i = 0; i < src->count(); ++i) {
1598  value_init(1 + count() + i);
1599  value_swap(1 + count() + i, src, i);
1600  src->value_destroy(i);
1601  }
1602 
1603  if (!leaf()) {
1604  // Move the child pointers from the right to the left node.
1605  for (int i = 0; i <= src->count(); ++i) {
1606  set_child(1 + count() + i, src->child(i));
1607  *src->mutable_child(i) = NULL;
1608  }
1609  }
1610 
1611  // Fixup the counts on the src and dest nodes.
1612  set_count(1 + count() + src->count());
1613  src->set_count(0);
1614 
1615  // Remove the value on the parent node.
1616  parent()->remove_value(position());
1617 }
1618 
1619 template <typename P>
1620 void btree_node<P>::swap(btree_node *x) {
1621  assert(leaf() == x->leaf());
1622 
1623  // Swap the values.
1624  for (int i = count(); i < x->count(); ++i) {
1625  value_init(i);
1626  }
1627  for (int i = x->count(); i < count(); ++i) {
1628  x->value_init(i);
1629  }
1630  int n = std::max(count(), x->count());
1631  for (int i = 0; i < n; ++i) {
1632  value_swap(i, x, i);
1633  }
1634  for (int i = count(); i < x->count(); ++i) {
1635  x->value_destroy(i);
1636  }
1637  for (int i = x->count(); i < count(); ++i) {
1638  value_destroy(i);
1639  }
1640 
1641  if (!leaf()) {
1642  // Swap the child pointers.
1643  for (int i = 0; i <= n; ++i) {
1644  btree_swap_helper(*mutable_child(i), *x->mutable_child(i));
1645  }
1646  for (int i = 0; i <= count(); ++i) {
1647  x->child(i)->fields_.parent = x;
1648  }
1649  for (int i = 0; i <= x->count(); ++i) {
1650  child(i)->fields_.parent = this;
1651  }
1652  }
1653 
1654  // Swap the counts.
1655  btree_swap_helper(fields_.count, x->fields_.count);
1656 }
1657 
1659 // btree_iterator methods
1660 template <typename N, typename R, typename P>
1661 void btree_iterator<N, R, P>::increment_slow() {
1662  if (node->leaf()) {
1663  assert(position >= node->count());
1664  self_type save(*this);
1665  while (position == node->count() && !node->is_root()) {
1666  assert(node->parent()->child(node->position()) == node);
1667  position = node->position();
1668  node = node->parent();
1669  }
1670  if (position == node->count()) {
1671  *this = save;
1672  }
1673  } else {
1674  assert(position < node->count());
1675  node = node->child(position + 1);
1676  while (!node->leaf()) {
1677  node = node->child(0);
1678  }
1679  position = 0;
1680  }
1681 }
1682 
1683 template <typename N, typename R, typename P>
1684 void btree_iterator<N, R, P>::increment_by(int count) {
1685  while (count > 0) {
1686  if (node->leaf()) {
1687  int rest = node->count() - position;
1688  position += std::min(rest, count);
1689  count = count - rest;
1690  if (position < node->count()) {
1691  return;
1692  }
1693  } else {
1694  --count;
1695  }
1696  increment_slow();
1697  }
1698 }
1699 
1700 template <typename N, typename R, typename P>
1701 void btree_iterator<N, R, P>::decrement_slow() {
1702  if (node->leaf()) {
1703  assert(position <= -1);
1704  self_type save(*this);
1705  while (position < 0 && !node->is_root()) {
1706  assert(node->parent()->child(node->position()) == node);
1707  position = node->position() - 1;
1708  node = node->parent();
1709  }
1710  if (position < 0) {
1711  *this = save;
1712  }
1713  } else {
1714  assert(position >= 0);
1715  node = node->child(position);
1716  while (!node->leaf()) {
1717  node = node->child(node->count());
1718  }
1719  position = node->count() - 1;
1720  }
1721 }
1722 
1724 // btree methods
1725 template <typename P>
1726 btree<P>::btree(const key_compare &comp, const allocator_type &alloc)
1727  : key_compare(comp),
1728  root_(alloc, NULL) {
1729 }
1730 
1731 template <typename P>
1732 btree<P>::btree(const self_type &x)
1733  : key_compare(x.key_comp()),
1734  root_(x.internal_allocator(), NULL) {
1735  assign(x);
1736 }
1737 
1738 template <typename P> template <typename ValuePointer>
1739 bpair<typename btree<P>::iterator, bool>
1740 btree<P>::insert_unique(const key_type &key, ValuePointer value) {
1741  if (empty()) {
1742  *mutable_root() = new_leaf_root_node(1);
1743  }
1744 
1745  bpair<iterator, int> res = internal_locate(key, iterator(root(), 0));
1746  iterator &iter = res.first;
1747  if (res.second == kExactMatch) {
1748  // The key already exists in the tree, do nothing.
1749  return make_bpair(internal_last(iter), false);
1750  } else if (!res.second) {
1751  iterator last = internal_last(iter);
1752  if (last.node && !compare_keys(key, last.key())) {
1753  // The key already exists in the tree, do nothing.
1754  return make_bpair(last, false);
1755  }
1756  }
1757 
1758  return make_bpair(internal_insert(iter, *value), true);
1759 }
1760 
1761 template <typename P>
1762 inline typename btree<P>::iterator
1763 btree<P>::insert_unique(iterator position, const value_type &v) {
1764  if (!empty()) {
1765  const key_type &key = params_type::key(v);
1766  if (position == end() || compare_keys(key, position.key())) {
1767  iterator prev = position;
1768  if (position == begin() || compare_keys((--prev).key(), key)) {
1769  // prev.key() < key < position.key()
1770  return internal_insert(position, v);
1771  }
1772  } else if (compare_keys(position.key(), key)) {
1773  iterator next = position;
1774  ++next;
1775  if (next == end() || compare_keys(key, next.key())) {
1776  // position.key() < key < next.key()
1777  return internal_insert(next, v);
1778  }
1779  } else {
1780  // position.key() == key
1781  return position;
1782  }
1783  }
1784  return insert_unique(v).first;
1785 }
1786 
1787 template <typename P> template <typename InputIterator>
1788 void btree<P>::insert_unique(InputIterator b, InputIterator e) {
1789  for (; b != e; ++b) {
1790  insert_unique(end(), *b);
1791  }
1792 }
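// Editorial usage sketch (assumes the btree_set wrapper declared in
// btree_set.h, whose hinted insert is expected to forward to the hinted
// insert_unique above): inserting already-sorted keys with end() as the hint
// avoids a fresh search from the root for every key, which is exactly the
// fast path the range overload above relies on.
//
//   btree_set<int> s;
//   btree_set<int>::iterator hint = s.end();
//   for (int i = 0; i < 1000; ++i) {
//     hint = s.insert(hint, i);  // appends via the prev.key() < key check
//   }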
1793 
1794 template <typename P> template <typename ValuePointer>
1795 typename btree<P>::iterator
1796 btree<P>::insert_multi(const key_type &key, ValuePointer value) {
1797  if (empty()) {
1798  *mutable_root() = new_leaf_root_node(1);
1799  }
1800 
1801  iterator iter = internal_upper_bound(key, iterator(root(), 0));
1802  if (!iter.node) {
1803  iter = end();
1804  }
1805  return internal_insert(iter, *value);
1806 }
1807 
1808 template <typename P>
1809 typename btree<P>::iterator
1810 btree<P>::insert_multi(iterator position, const value_type &v) {
1811  if (!empty()) {
1812  const key_type &key = params_type::key(v);
1813  if (position == end() || !compare_keys(position.key(), key)) {
1814  iterator prev = position;
1815  if (position == begin() || !compare_keys(key, (--prev).key())) {
1816  // prev.key() <= key <= position.key()
1817  return internal_insert(position, v);
1818  }
1819  } else {
1820  iterator next = position;
1821  ++next;
1822  if (next == end() || !compare_keys(next.key(), key)) {
1823  // position.key() < key <= next.key()
1824  return internal_insert(next, v);
1825  }
1826  }
1827  }
1828  return insert_multi(v);
1829 }
1830 
1831 template <typename P> template <typename InputIterator>
1832 void btree<P>::insert_multi(InputIterator b, InputIterator e) {
1833  for (; b != e; ++b) {
1834  insert_multi(end(), *b);
1835  }
1836 }
1837 
1838 template <typename P>
1839 void btree<P>::assign(const self_type &x) {
1840  clear();
1841 
1842  *mutable_key_comp() = x.key_comp();
1843  *mutable_internal_allocator() = x.internal_allocator();
1844 
1845  // Assignment can avoid key comparisons because we know the order of the
1846  // values is the same order we'll store them in.
1847  for (const_iterator iter = x.begin(); iter != x.end(); ++iter) {
1848  if (empty()) {
1849  insert_multi(*iter);
1850  } else {
1851  // If the btree is not empty, we can just insert the new value at the end
1852  // of the tree!
1853  internal_insert(end(), *iter);
1854  }
1855  }
1856 }
1857 
1858 template <typename P>
1859 typename btree<P>::iterator btree<P>::erase(iterator iter) {
1860  bool internal_delete = false;
1861  if (!iter.node->leaf()) {
1862  // Deletion of a value on an internal node. Swap the value with its in-order
1863  // predecessor, the largest value in the left subtree: simply decrement iter.
1864  iterator tmp_iter(iter--);
1865  assert(iter.node->leaf());
1866  assert(!compare_keys(tmp_iter.key(), iter.key()));
1867  iter.node->value_swap(iter.position, tmp_iter.node, tmp_iter.position);
1868  internal_delete = true;
1869  --*mutable_size();
1870  } else if (!root()->leaf()) {
1871  --*mutable_size();
1872  }
1873 
1874  // Delete the key from the leaf.
1875  iter.node->remove_value(iter.position);
1876 
1877  // We want to return the next value after the one we just erased. If we
1878  // erased from an internal node (internal_delete == true), then the next
1879  // value is ++(++iter). If we erased from a leaf node (internal_delete ==
1880  // false) then the next value is ++iter. Note that ++iter may point to an
1881  // internal node and the value in the internal node may move to a leaf node
1882  // (iter.node) when rebalancing is performed at the leaf level.
1883 
1884  // Merge/rebalance as we walk back up the tree.
1885  iterator res(iter);
1886  for (;;) {
1887  if (iter.node == root()) {
1888  try_shrink();
1889  if (empty()) {
1890  return end();
1891  }
1892  break;
1893  }
1894  if (iter.node->count() >= kMinNodeValues) {
1895  break;
1896  }
1897  bool merged = try_merge_or_rebalance(&iter);
1898  if (iter.node->leaf()) {
1899  res = iter;
1900  }
1901  if (!merged) {
1902  break;
1903  }
1904  iter.node = iter.node->parent();
1905  }
1906 
1907  // Adjust our return value. If we're pointing at the end of a node, advance
1908  // the iterator.
1909  if (res.position == res.node->count()) {
1910  res.position = res.node->count() - 1;
1911  ++res;
1912  }
1913  // If we erased from an internal node, advance the iterator.
1914  if (internal_delete) {
1915  ++res;
1916  }
1917  return res;
1918 }
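// Editorial usage sketch: because erase() hands back the iterator following
// the erased value, erase-while-iterating should be written against the
// return value rather than a previously saved iterator. The btree_map wrapper
// and the should_remove predicate are hypothetical names used for
// illustration.
//
//   for (btree_map<int, int>::iterator it = m.begin(); it != m.end(); ) {
//     if (should_remove(it->second)) {
//       it = m.erase(it);  // continues just past the erased entry
//     } else {
//       ++it;
//     }
//   }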
1919 
1920 template <typename P>
1921 int btree<P>::erase(iterator begin, iterator end) {
1922  int count = (int) std::distance(begin, end); // BENTLEY CHANGES - (int) VS2012
1923  for (int i = 0; i < count; i++) {
1924  begin = erase(begin);
1925  }
1926  return count;
1927 }
1928 
1929 template <typename P>
1930 int btree<P>::erase_unique(const key_type &key) {
1931  iterator iter = internal_find_unique(key, iterator(root(), 0));
1932  if (!iter.node) {
1933  // The key doesn't exist in the tree, return nothing done.
1934  return 0;
1935  }
1936  erase(iter);
1937  return 1;
1938 }
1939 
1940 template <typename P>
1941 int btree<P>::erase_multi(const key_type &key) {
1942  iterator begin = internal_lower_bound(key, iterator(root(), 0));
1943  if (!begin.node) {
1944  // The key doesn't exist in the tree, return nothing done.
1945  return 0;
1946  }
1947  // Delete all of the keys between begin and upper_bound(key).
1948  iterator end = internal_end(
1949  internal_upper_bound(key, iterator(root(), 0)));
1950  return erase(begin, end);
1951 }
1952 
1953 template <typename P>
1954 void btree<P>::clear() {
1955  if (root() != NULL) {
1956  internal_clear(root());
1957  }
1958  *mutable_root() = NULL;
1959 }
1960 
1961 template <typename P>
1962 void btree<P>::swap(self_type &x) {
1963  std::swap(static_cast<key_compare&>(*this), static_cast<key_compare&>(x));
1964  std::swap(root_, x.root_);
1965 }
1966 
1967 template <typename P>
1968 void btree<P>::verify() const {
1969  if (root() != NULL) {
1970  assert(size() == internal_verify(root(), NULL, NULL));
1971  assert(leftmost() == (++const_iterator(root(), -1)).node);
1972  assert(rightmost() == (--const_iterator(root(), root()->count())).node);
1973  assert(leftmost()->leaf());
1974  assert(rightmost()->leaf());
1975  } else {
1976  assert(size() == 0);
1977  assert(leftmost() == NULL);
1978  assert(rightmost() == NULL);
1979  }
1980 }
1981 
1982 template <typename P>
1983 void btree<P>::rebalance_or_split(iterator *iter) {
1984  node_type *&node = iter->node;
1985  int &insert_position = iter->position;
1986  assert(node->count() == node->max_count());
1987 
1988  // First try to make room on the node by rebalancing.
1989  node_type *parent = node->parent();
1990  if (node != root()) {
1991  if (node->position() > 0) {
1992  // Try rebalancing with our left sibling.
1993  node_type *left = parent->child(node->position() - 1);
1994  if (left->count() < left->max_count()) {
1995  // We bias rebalancing based on the position being inserted. If we're
1996  // inserting at the end of the right node then we bias rebalancing to
1997  // fill up the left node.
1998  int to_move = (left->max_count() - left->count()) /
1999  (1 + (insert_position < left->max_count()));
2000  to_move = std::max(1, to_move);
2001 
2002  if (((insert_position - to_move) >= 0) ||
2003  ((left->count() + to_move) < left->max_count())) {
2004  left->rebalance_right_to_left(node, to_move);
2005 
2006  assert(node->max_count() - node->count() == to_move);
2007  insert_position = insert_position - to_move;
2008  if (insert_position < 0) {
2009  insert_position = insert_position + left->count() + 1;
2010  node = left;
2011  }
2012 
2013  assert(node->count() < node->max_count());
2014  return;
2015  }
2016  }
2017  }
2018 
2019  if (node->position() < parent->count()) {
2020  // Try rebalancing with our right sibling.
2021  node_type *right = parent->child(node->position() + 1);
2022  if (right->count() < right->max_count()) {
2023  // We bias rebalancing based on the position being inserted. If we're
2024  // inserting at the beginning of the left node then we bias rebalancing
2025  // to fill up the right node.
2026  int to_move = (right->max_count() - right->count()) /
2027  (1 + (insert_position > 0));
2028  to_move = std::max(1, to_move);
2029 
2030  if ((insert_position <= (node->count() - to_move)) ||
2031  ((right->count() + to_move) < right->max_count())) {
2032  node->rebalance_left_to_right(right, to_move);
2033 
2034  if (insert_position > node->count()) {
2035  insert_position = insert_position - node->count() - 1;
2036  node = right;
2037  }
2038 
2039  assert(node->count() < node->max_count());
2040  return;
2041  }
2042  }
2043  }
2044 
2045  // Rebalancing failed; make sure there is room on the parent node for a
2046  // new value.
2047  if (parent->count() == parent->max_count()) {
2048  iterator parent_iter(node->parent(), node->position());
2049  rebalance_or_split(&parent_iter);
2050  }
2051  } else {
2052  // Rebalancing not possible because this is the root node.
2053  if (root()->leaf()) {
2054  // The root node is currently a leaf node: create a new root node and set
2055  // the current root node as the child of the new root.
2056  parent = new_internal_root_node();
2057  parent->set_child(0, root());
2058  *mutable_root() = parent;
2059  assert(*mutable_rightmost() == parent->child(0));
2060  } else {
2061  // The root node is an internal node. We do not want to create a new root
2062  // node because the root node is special and holds the size of the tree
2063  // and a pointer to the rightmost node. So we create a new internal node
2064  // and move all of the items on the current root into the new node.
2065  parent = new_internal_node(parent);
2066  parent->set_child(0, parent);
2067  parent->swap(root());
2068  node = parent;
2069  }
2070  }
2071 
2072  // Split the node.
2073  node_type *split_node;
2074  if (node->leaf()) {
2075  split_node = new_leaf_node(parent);
2076  node->split(split_node, insert_position);
2077  if (rightmost() == node) {
2078  *mutable_rightmost() = split_node;
2079  }
2080  } else {
2081  split_node = new_internal_node(parent);
2082  node->split(split_node, insert_position);
2083  }
2084 
2085  if (insert_position > node->count()) {
2086  insert_position = insert_position - node->count() - 1;
2087  node = split_node;
2088  }
2089 }
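// Editorial note on the to_move arithmetic above (numbers illustrative): with
// a capacity of 15 and a left sibling holding 9 values there are 6 free slots.
// Inserting at the very end of the full right node shifts all 6 values left,
// filling the left sibling; inserting anywhere else shifts only 6 / 2 = 3,
// leaving room on both sides of the eventual insertion point.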
2090 
2091 template <typename P>
2092 void btree<P>::merge_nodes(node_type *left, node_type *right) {
2093  left->merge(right);
2094  if (right->leaf()) {
2095  if (rightmost() == right) {
2096  *mutable_rightmost() = left;
2097  }
2098  delete_leaf_node(right);
2099  } else {
2100  delete_internal_node(right);
2101  }
2102 }
2103 
2104 template <typename P>
2105 bool btree<P>::try_merge_or_rebalance(iterator *iter) {
2106  node_type *parent = iter->node->parent();
2107  if (iter->node->position() > 0) {
2108  // Try merging with our left sibling.
2109  node_type *left = parent->child(iter->node->position() - 1);
2110  if ((1 + left->count() + iter->node->count()) <= left->max_count()) {
2111  iter->position += 1 + left->count();
2112  merge_nodes(left, iter->node);
2113  iter->node = left;
2114  return true;
2115  }
2116  }
2117  if (iter->node->position() < parent->count()) {
2118  // Try merging with our right sibling.
2119  node_type *right = parent->child(iter->node->position() + 1);
2120  if ((1 + iter->node->count() + right->count()) <= right->max_count()) {
2121  merge_nodes(iter->node, right);
2122  return true;
2123  }
2124  // Try rebalancing with our right sibling. We don't perform rebalancing if
2125  // we deleted the first element from iter->node and the node is not
2126  // empty. This is a small optimization for the common pattern of deleting
2127  // from the front of the tree.
2128  if ((right->count() > kMinNodeValues) &&
2129  ((iter->node->count() == 0) ||
2130  (iter->position > 0))) {
2131  int to_move = (right->count() - iter->node->count()) / 2;
2132  to_move = std::min(to_move, right->count() - 1);
2133  iter->node->rebalance_right_to_left(right, to_move);
2134  return false;
2135  }
2136  }
2137  if (iter->node->position() > 0) {
2138  // Try rebalancing with our left sibling. We don't perform rebalancing if
2139  // we deleted the last element from iter->node and the node is not
2140  // empty. This is a small optimization for the common pattern of deleting
2141  // from the back of the tree.
2142  node_type *left = parent->child(iter->node->position() - 1);
2143  if ((left->count() > kMinNodeValues) &&
2144  ((iter->node->count() == 0) ||
2145  (iter->position < iter->node->count()))) {
2146  int to_move = (left->count() - iter->node->count()) / 2;
2147  to_move = std::min(to_move, left->count() - 1);
2148  left->rebalance_left_to_right(iter->node, to_move);
2149  iter->position += to_move;
2150  return false;
2151  }
2152  }
2153  return false;
2154 }
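// Editorial note on the "small optimization" comments above: when values are
// erased strictly from the front (or back) of the tree, a node that just lost
// its first (or last) element is not topped up from its sibling, so a steady
// head- or tail-erase workload avoids repeatedly shuffling values into a node
// it is about to drain anyway.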
2155 
2156 template <typename P>
2157 void btree<P>::try_shrink() {
2158  if (root()->count() > 0) {
2159  return;
2160  }
2161  // We deleted the last item on the root node; shrink the height of the tree.
2162  if (root()->leaf()) {
2163  assert(size() == 0);
2164  delete_leaf_node(root());
2165  *mutable_root() = NULL;
2166  } else {
2167  node_type *child = root()->child(0);
2168  if (child->leaf()) {
2169  // The child is a leaf node so simply make it the root node in the tree.
2170  child->make_root();
2171  delete_internal_root_node();
2172  *mutable_root() = child;
2173  } else {
2174  // The child is an internal node. We want to keep the existing root node
2175  // so we move all of the values from the child node into the existing
2176  // (empty) root node.
2177  child->swap(root());
2178  delete_internal_node(child);
2179  }
2180  }
2181 }
2182 
2183 template <typename P> template <typename IterType>
2184 inline IterType btree<P>::internal_last(IterType iter) {
2185  while (iter.node && iter.position == iter.node->count()) {
2186  iter.position = iter.node->position();
2187  iter.node = iter.node->parent();
2188  if (iter.node->leaf()) {
2189  iter.node = NULL;
2190  }
2191  }
2192  return iter;
2193 }
2194 
2195 template <typename P>
2196 inline typename btree<P>::iterator
2197 btree<P>::internal_insert(iterator iter, const value_type &v) {
2198  if (!iter.node->leaf()) {
2199  // We can't insert on an internal node. Instead, we'll insert after the
2200  // previous value which is guaranteed to be on a leaf node.
2201  --iter;
2202  ++iter.position;
2203  }
2204  if (iter.node->count() == iter.node->max_count()) {
2205  // Make room in the leaf for the new item.
2206  if (iter.node->max_count() < kNodeValues) {
2207  // Insertion into the root where the root is smaller than the full node
2208  // size. Simply grow the size of the root node.
2209  assert(iter.node == root());
2210  iter.node = new_leaf_root_node(
2211  std::min<int>(kNodeValues, 2 * iter.node->max_count()));
2212  iter.node->swap(root());
2213  delete_leaf_node(root());
2214  *mutable_root() = iter.node;
2215  } else {
2216  rebalance_or_split(&iter);
2217  ++*mutable_size();
2218  }
2219  } else if (!root()->leaf()) {
2220  ++*mutable_size();
2221  }
2222  iter.node->insert_value(iter.position, v);
2223  return iter;
2224 }
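// Editorial note: the undersized-root branch above is what keeps tiny trees
// tiny. The first insertion allocates a 1-value leaf root (see insert_unique
// and insert_multi), and each time that root fills up its capacity doubles
// (1, 2, 4, ...) until it reaches kNodeValues, after which ordinary splitting
// takes over.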
2225 
2226 template <typename P> template <typename IterType>
2227 inline bpair<IterType, int> btree<P>::internal_locate(
2228  const key_type &key, IterType iter) const {
2229  return internal_locate_type::dispatch(key, *this, iter);
2230 }
2231 
2232 template <typename P> template <typename IterType>
2233 inline bpair<IterType, int> btree<P>::internal_locate_plain_compare(
2234  const key_type &key, IterType iter) const {
2235  for (;;) {
2236  iter.position = iter.node->lower_bound(key, key_comp());
2237  if (iter.node->leaf()) {
2238  break;
2239  }
2240  iter.node = iter.node->child(iter.position);
2241  }
2242  return make_bpair(iter, 0);
2243 }
2244 
2245 template <typename P> template <typename IterType>
2246 inline bpair<IterType, int> btree<P>::internal_locate_compare_to(
2247  const key_type &key, IterType iter) const {
2248  for (;;) {
2249  int res = iter.node->lower_bound(key, key_comp());
2250  iter.position = res & kMatchMask;
2251  if (res & kExactMatch) {
2252  return make_bpair(iter, static_cast<int>(kExactMatch));
2253  }
2254  if (iter.node->leaf()) {
2255  break;
2256  }
2257  iter.node = iter.node->child(iter.position);
2258  }
2259  return make_bpair(iter, -kExactMatch);
2260 }
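// Editorial note: with a compare-to (three-way) comparator, lower_bound()
// packs its answer into a single int: the low bits carry the position and the
// kExactMatch flag records whether the key was found. For example, an exact
// match in slot 3 comes back as (kExactMatch | 3), so (res & kMatchMask) == 3
// and (res & kExactMatch) is non-zero.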
2261 
2262 template <typename P> template <typename IterType>
2263 IterType btree<P>::internal_lower_bound(
2264  const key_type &key, IterType iter) const {
2265  if (iter.node) {
2266  for (;;) {
2267  iter.position =
2268  iter.node->lower_bound(key, key_comp()) & kMatchMask;
2269  if (iter.node->leaf()) {
2270  break;
2271  }
2272  iter.node = iter.node->child(iter.position);
2273  }
2274  iter = internal_last(iter);
2275  }
2276  return iter;
2277 }
2278 
2279 template <typename P> template <typename IterType>
2280 IterType btree<P>::internal_upper_bound(
2281  const key_type &key, IterType iter) const {
2282  if (iter.node) {
2283  for (;;) {
2284  iter.position = iter.node->upper_bound(key, key_comp());
2285  if (iter.node->leaf()) {
2286  break;
2287  }
2288  iter.node = iter.node->child(iter.position);
2289  }
2290  iter = internal_last(iter);
2291  }
2292  return iter;
2293 }
2294 
2295 template <typename P> template <typename IterType>
2296 IterType btree<P>::internal_find_unique(
2297  const key_type &key, IterType iter) const {
2298  if (iter.node) {
2299  bpair<IterType, int> res = internal_locate(key, iter);
2300  if (res.second == kExactMatch) {
2301  return res.first;
2302  }
2303  if (!res.second) {
2304  iter = internal_last(res.first);
2305  if (iter.node && !compare_keys(key, iter.key())) {
2306  return iter;
2307  }
2308  }
2309  }
2310  return IterType(NULL, 0);
2311 }
2312 
2313 template <typename P> template <typename IterType>
2314 IterType btree<P>::internal_find_multi(
2315  const key_type &key, IterType iter) const {
2316  if (iter.node) {
2317  iter = internal_lower_bound(key, iter);
2318  if (iter.node) {
2319  iter = internal_last(iter);
2320  if (iter.node && !compare_keys(key, iter.key())) {
2321  return iter;
2322  }
2323  }
2324  }
2325  return IterType(NULL, 0);
2326 }
2327 
2328 template <typename P>
2329 void btree<P>::internal_clear(node_type *node) {
2330  if (!node->leaf()) {
2331  for (int i = 0; i <= node->count(); ++i) {
2332  internal_clear(node->child(i));
2333  }
2334  if (node == root()) {
2335  delete_internal_root_node();
2336  } else {
2337  delete_internal_node(node);
2338  }
2339  } else {
2340  delete_leaf_node(node);
2341  }
2342 }
2343 
2344 #if defined COMPILE_BTREE_DUMP
2345 template <typename P>
2346 void btree<P>::internal_dump(
2347  std::ostream &os, const node_type *node, int level) const {
2348  for (int i = 0; i < node->count(); ++i) {
2349  if (!node->leaf()) {
2350  internal_dump(os, node->child(i), level + 1);
2351  }
2352  for (int j = 0; j < level; ++j) {
2353  os << " ";
2354  }
2355  os << node->key(i) << " [" << level << "]\n";
2356  }
2357  if (!node->leaf()) {
2358  internal_dump(os, node->child(node->count()), level + 1);
2359  }
2360 }
2361 #endif
2362 
2363 template <typename P>
2364 int btree<P>::internal_verify(
2365  const node_type *node, const key_type *lo, const key_type *hi) const {
2366  assert(node->count() > 0);
2367  assert(node->count() <= node->max_count());
2368  if (lo) {
2369  assert(!compare_keys(node->key(0), *lo));
2370  }
2371  if (hi) {
2372  assert(!compare_keys(*hi, node->key(node->count() - 1)));
2373  }
2374  for (int i = 1; i < node->count(); ++i) {
2375  assert(!compare_keys(node->key(i), node->key(i - 1)));
2376  }
2377  int count = node->count();
2378  if (!node->leaf()) {
2379  for (int i = 0; i <= node->count(); ++i) {
2380  assert(node->child(i) != NULL);
2381  assert(node->child(i)->parent() == node);
2382  assert(node->child(i)->position() == i);
2383  count += internal_verify(
2384  node->child(i),
2385  (i == 0) ? lo : &node->key(i - 1),
2386  (i == node->count()) ? hi : &node->key(i));
2387  }
2388  }
2389  return count;
2390 }
2391 
2392 END_BENTLEY_NAMESPACE
2393 
2394 #pragma pop_macro ("min")
2395 #pragma pop_macro ("max")
2396 
2397 #endif // BENTLEY_UTIL_BTREE_BTREE_H__
2398 