You cannot select more than 25 topics. Topics must start with a Chinese character, a letter, or a number; they can include dashes ('-') and can be up to 35 characters long.

btree.h 139 kB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419
  1. // Copyright 2018 The Abseil Authors.
  2. //
  3. // Licensed under the Apache License, Version 2.0 (the "License");
  4. // you may not use this file except in compliance with the License.
  5. // You may obtain a copy of the License at
  6. //
  7. // https://www.apache.org/licenses/LICENSE-2.0
  8. //
  9. // Unless required by applicable law or agreed to in writing, software
  10. // distributed under the License is distributed on an "AS IS" BASIS,
  11. // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. // See the License for the specific language governing permissions and
  13. // limitations under the License.
  14. // A btree implementation of the STL set and map interfaces. A btree is smaller
  15. // and generally also faster than STL set/map (refer to the benchmarks below).
  16. // The red-black tree implementation of STL set/map has an overhead of 3
  17. // pointers (left, right and parent) plus the node color information for each
  18. // stored value. So a set<int32_t> consumes 40 bytes for each value stored in
  19. // 64-bit mode. This btree implementation stores multiple values on fixed
  20. // size nodes (usually 256 bytes) and doesn't store child pointers for leaf
  21. // nodes. The result is that a btree_set<int32_t> may use much less memory per
  22. // stored value. For the random insertion benchmark in btree_bench.cc, a
  23. // btree_set<int32_t> with node-size of 256 uses 5.1 bytes per stored value.
  24. //
  25. // The packing of multiple values on to each node of a btree has another effect
  26. // besides better space utilization: better cache locality due to fewer cache
  27. // lines being accessed. Better cache locality translates into faster
  28. // operations.
  29. //
  30. // CAVEATS
  31. //
  32. // Insertions and deletions on a btree can cause splitting, merging or
  33. // rebalancing of btree nodes. And even without these operations, insertions
  34. // and deletions on a btree will move values around within a node. In both
  35. // cases, the result is that insertions and deletions can invalidate iterators
  36. // pointing to values other than the one being inserted/deleted. Therefore, this
  37. // container does not provide pointer stability. This is notably different from
  38. // STL set/map which takes care to not invalidate iterators on insert/erase
  39. // except, of course, for iterators pointing to the value being erased. A
  40. // partial workaround when erasing is available: erase() returns an iterator
  41. // pointing to the item just after the one that was erased (or end() if none
  42. // exists).
  43. #ifndef ABSL_CONTAINER_INTERNAL_BTREE_H_
  44. #define ABSL_CONTAINER_INTERNAL_BTREE_H_
  45. #include <algorithm>
  46. #include <cassert>
  47. #include <cstddef>
  48. #include <cstdint>
  49. #include <cstring>
  50. #include <functional>
  51. #include <iterator>
  52. #include <limits>
  53. #include <new>
  54. #include <string>
  55. #include <type_traits>
  56. #include <utility>
  57. #include "absl/base/internal/raw_logging.h"
  58. #include "absl/base/macros.h"
  59. #include "absl/container/internal/common.h"
  60. #include "absl/container/internal/compressed_tuple.h"
  61. #include "absl/container/internal/container_memory.h"
  62. #include "absl/container/internal/layout.h"
  63. #include "absl/memory/memory.h"
  64. #include "absl/meta/type_traits.h"
  65. #include "absl/strings/cord.h"
  66. #include "absl/strings/string_view.h"
  67. #include "absl/types/compare.h"
  68. #include "absl/utility/utility.h"
  69. namespace absl
  70. {
  71. ABSL_NAMESPACE_BEGIN
  72. namespace container_internal
  73. {
  74. #ifdef ABSL_BTREE_ENABLE_GENERATIONS
  75. #error ABSL_BTREE_ENABLE_GENERATIONS cannot be directly set
  76. #elif defined(ABSL_HAVE_ADDRESS_SANITIZER) || \
  77. defined(ABSL_HAVE_MEMORY_SANITIZER)
  78. // When compiled in sanitizer mode, we add generation integers to the nodes and
  79. // iterators. When iterators are used, we validate that the container has not
  80. // been mutated since the iterator was constructed.
  81. #define ABSL_BTREE_ENABLE_GENERATIONS
  82. #endif
// The type a `const Compare` yields when invoked with `const T&` and
// `const U&` arguments (either `bool` or something ordering-like).
template<typename Compare, typename T, typename U>
using compare_result_t = absl::result_of_t<const Compare(const T&, const U&)>;
// A helper class that indicates if the Compare parameter is a key-compare-to
// comparator: true when the comparator's result converts to
// `absl::weak_ordering` (three-way) rather than plain `bool`.
template<typename Compare, typename T>
using btree_is_key_compare_to =
std::is_convertible<compare_result_t<Compare, T, T>, absl::weak_ordering>;
  90. struct StringBtreeDefaultLess
  91. {
  92. using is_transparent = void;
  93. StringBtreeDefaultLess() = default;
  94. // Compatibility constructor.
  95. StringBtreeDefaultLess(std::less<std::string>)
  96. {
  97. } // NOLINT
  98. StringBtreeDefaultLess(std::less<absl::string_view>)
  99. {
  100. } // NOLINT
  101. // Allow converting to std::less for use in key_comp()/value_comp().
  102. explicit operator std::less<std::string>() const
  103. {
  104. return {};
  105. }
  106. explicit operator std::less<absl::string_view>() const
  107. {
  108. return {};
  109. }
  110. explicit operator std::less<absl::Cord>() const
  111. {
  112. return {};
  113. }
  114. absl::weak_ordering operator()(absl::string_view lhs, absl::string_view rhs) const
  115. {
  116. return compare_internal::compare_result_as_ordering(lhs.compare(rhs));
  117. }
  118. StringBtreeDefaultLess(std::less<absl::Cord>)
  119. {
  120. } // NOLINT
  121. absl::weak_ordering operator()(const absl::Cord& lhs, const absl::Cord& rhs) const
  122. {
  123. return compare_internal::compare_result_as_ordering(lhs.Compare(rhs));
  124. }
  125. absl::weak_ordering operator()(const absl::Cord& lhs, absl::string_view rhs) const
  126. {
  127. return compare_internal::compare_result_as_ordering(lhs.Compare(rhs));
  128. }
  129. absl::weak_ordering operator()(absl::string_view lhs, const absl::Cord& rhs) const
  130. {
  131. return compare_internal::compare_result_as_ordering(-rhs.Compare(lhs));
  132. }
  133. };
  134. struct StringBtreeDefaultGreater
  135. {
  136. using is_transparent = void;
  137. StringBtreeDefaultGreater() = default;
  138. StringBtreeDefaultGreater(std::greater<std::string>)
  139. {
  140. } // NOLINT
  141. StringBtreeDefaultGreater(std::greater<absl::string_view>)
  142. {
  143. } // NOLINT
  144. // Allow converting to std::greater for use in key_comp()/value_comp().
  145. explicit operator std::greater<std::string>() const
  146. {
  147. return {};
  148. }
  149. explicit operator std::greater<absl::string_view>() const
  150. {
  151. return {};
  152. }
  153. explicit operator std::greater<absl::Cord>() const
  154. {
  155. return {};
  156. }
  157. absl::weak_ordering operator()(absl::string_view lhs, absl::string_view rhs) const
  158. {
  159. return compare_internal::compare_result_as_ordering(rhs.compare(lhs));
  160. }
  161. StringBtreeDefaultGreater(std::greater<absl::Cord>)
  162. {
  163. } // NOLINT
  164. absl::weak_ordering operator()(const absl::Cord& lhs, const absl::Cord& rhs) const
  165. {
  166. return compare_internal::compare_result_as_ordering(rhs.Compare(lhs));
  167. }
  168. absl::weak_ordering operator()(const absl::Cord& lhs, absl::string_view rhs) const
  169. {
  170. return compare_internal::compare_result_as_ordering(-lhs.Compare(rhs));
  171. }
  172. absl::weak_ordering operator()(absl::string_view lhs, const absl::Cord& rhs) const
  173. {
  174. return compare_internal::compare_result_as_ordering(rhs.Compare(lhs));
  175. }
  176. };
  177. // See below comments for checked_compare.
  178. template<typename Compare, bool is_class = std::is_class<Compare>::value>
  179. struct checked_compare_base : Compare
  180. {
  181. using Compare::Compare;
  182. explicit checked_compare_base(Compare c) :
  183. Compare(std::move(c))
  184. {
  185. }
  186. const Compare& comp() const
  187. {
  188. return *this;
  189. }
  190. };
  191. template<typename Compare>
  192. struct checked_compare_base<Compare, false>
  193. {
  194. explicit checked_compare_base(Compare c) :
  195. compare(std::move(c))
  196. {
  197. }
  198. const Compare& comp() const
  199. {
  200. return compare;
  201. }
  202. Compare compare;
  203. };
// A mechanism for opting out of checked_compare for use only in btree_test.cc.
// Comparators deriving from this empty tag are used verbatim, without the
// strict-weak-ordering debug assertions that checked_compare adds.
struct BtreeTestOnlyCheckedCompareOptOutBase
{
};
  208. // A helper class to adapt the specified comparator for two use cases:
  209. // (1) When using common Abseil string types with common comparison functors,
  210. // convert a boolean comparison into a three-way comparison that returns an
  211. // `absl::weak_ordering`. This helper class is specialized for
  212. // less<std::string>, greater<std::string>, less<string_view>,
  213. // greater<string_view>, less<absl::Cord>, and greater<absl::Cord>.
  214. // (2) Adapt the comparator to diagnose cases of non-strict-weak-ordering (see
  215. // https://en.cppreference.com/w/cpp/named_req/Compare) in debug mode. Whenever
  216. // a comparison is made, we will make assertions to verify that the comparator
  217. // is valid.
template<typename Compare, typename Key>
struct key_compare_adapter
{
// Inherit from checked_compare_base to support function pointers and also
// keep empty-base-optimization (EBO) support for classes.
// Note: we can't use CompressedTuple here because that would interfere
// with the EBO for `btree::rightmost_`. `btree::rightmost_` is itself a
// CompressedTuple and nested `CompressedTuple`s don't support EBO.
// TODO(b/214288561): use CompressedTuple instead once it supports EBO for
// nested `CompressedTuple`s.
struct checked_compare : checked_compare_base<Compare>
{
private:
using Base = typename checked_compare::checked_compare_base;
using Base::comp;
// If possible, returns whether `t` is equivalent to itself. We can only do
// this for `Key`s because we can't be sure that it's safe to call
// `comp()(k, k)` otherwise. Even if SFINAE allows it, there could be a
// compilation failure inside the implementation of the comparison operator.
bool is_self_equivalent(const Key& k) const
{
// Note: this works for both boolean and three-way comparators.
return comp()(k, k) == 0;
}
// If we can't compare `t` with itself, returns true unconditionally.
template<typename T>
bool is_self_equivalent(const T&) const
{
return true;
}
public:
using Base::Base;
// Implicit conversion from the wrapped comparator (intentionally
// non-explicit so a plain Compare can be passed where a checked_compare
// is expected).
checked_compare(Compare comp) :
Base(std::move(comp))
{
} // NOLINT
// Allow converting to Compare for use in key_comp()/value_comp().
explicit operator Compare() const
{
return comp();
}
// Overload selected when Compare(T, U) yields `bool`: forwards to the
// wrapped comparator, asserting strict-weak-ordering properties.
template<typename T, typename U, absl::enable_if_t<std::is_same<bool, compare_result_t<Compare, T, U>>::value, int> = 0>
bool operator()(const T& lhs, const U& rhs) const
{
// NOTE: if any of these assertions fail, then the comparator does not
// establish a strict-weak-ordering (see
// https://en.cppreference.com/w/cpp/named_req/Compare).
assert(is_self_equivalent(lhs));
assert(is_self_equivalent(rhs));
const bool lhs_comp_rhs = comp()(lhs, rhs);
// Asymmetry check: lhs < rhs and rhs < lhs must not both hold.
assert(!lhs_comp_rhs || !comp()(rhs, lhs));
return lhs_comp_rhs;
}
// Overload selected when Compare(T, U) yields a three-way result: in debug
// builds also verifies that comparing in the opposite direction gives the
// mirrored ordering.
template<
typename T,
typename U,
absl::enable_if_t<std::is_convertible<compare_result_t<Compare, T, U>, absl::weak_ordering>::value, int> = 0>
absl::weak_ordering operator()(const T& lhs, const U& rhs) const
{
// NOTE: if any of these assertions fail, then the comparator does not
// establish a strict-weak-ordering (see
// https://en.cppreference.com/w/cpp/named_req/Compare).
assert(is_self_equivalent(lhs));
assert(is_self_equivalent(rhs));
const absl::weak_ordering lhs_comp_rhs = comp()(lhs, rhs);
#ifndef NDEBUG
const absl::weak_ordering rhs_comp_lhs = comp()(rhs, lhs);
if (lhs_comp_rhs > 0)
{
assert(rhs_comp_lhs < 0 && "lhs_comp_rhs > 0 -> rhs_comp_lhs < 0");
}
else if (lhs_comp_rhs == 0)
{
assert(rhs_comp_lhs == 0 && "lhs_comp_rhs == 0 -> rhs_comp_lhs == 0");
}
else
{
assert(rhs_comp_lhs > 0 && "lhs_comp_rhs < 0 -> rhs_comp_lhs > 0");
}
#endif
return lhs_comp_rhs;
}
};
// Use the checked wrapper unless the comparator explicitly opts out (a
// mechanism reserved for btree_test.cc).
using type = absl::conditional_t<
std::is_base_of<BtreeTestOnlyCheckedCompareOptOutBase, Compare>::value,
Compare,
checked_compare>;
};
// Specializations for common string-like key/comparator pairs: replace
// std::less/std::greater over std::string, absl::string_view, and absl::Cord
// with the transparent three-way comparators defined above, enabling
// heterogeneous lookup and single-pass key-compare-to comparisons.
template<>
struct key_compare_adapter<std::less<std::string>, std::string>
{
using type = StringBtreeDefaultLess;
};
template<>
struct key_compare_adapter<std::greater<std::string>, std::string>
{
using type = StringBtreeDefaultGreater;
};
template<>
struct key_compare_adapter<std::less<absl::string_view>, absl::string_view>
{
using type = StringBtreeDefaultLess;
};
template<>
struct key_compare_adapter<std::greater<absl::string_view>, absl::string_view>
{
using type = StringBtreeDefaultGreater;
};
template<>
struct key_compare_adapter<std::less<absl::Cord>, absl::Cord>
{
using type = StringBtreeDefaultLess;
};
template<>
struct key_compare_adapter<std::greater<absl::Cord>, absl::Cord>
{
using type = StringBtreeDefaultGreater;
};
  336. // Detects an 'absl_btree_prefer_linear_node_search' member. This is
  337. // a protocol used as an opt-in or opt-out of linear search.
  338. //
  339. // For example, this would be useful for key types that wrap an integer
  340. // and define their own cheap operator<(). For example:
  341. //
  342. // class K {
  343. // public:
  344. // using absl_btree_prefer_linear_node_search = std::true_type;
  345. // ...
  346. // private:
  347. // friend bool operator<(K a, K b) { return a.k_ < b.k_; }
  348. // int k_;
  349. // };
  350. //
  351. // btree_map<K, V> m; // Uses linear search
  352. //
  353. // If T has the preference tag, then it has a preference.
  354. // Btree will use the tag's truth value.
// Primary templates: by default a type expresses no linear-search preference.
template<typename T, typename = void>
struct has_linear_node_search_preference : std::false_type
{
};
template<typename T, typename = void>
struct prefers_linear_node_search : std::false_type
{
};
// Detection specializations: selected when the member type
// `T::absl_btree_prefer_linear_node_search` exists. The first reports that a
// preference is present; the second inherits the tag so its truth value
// becomes the preference itself.
template<typename T>
struct has_linear_node_search_preference<
T,
absl::void_t<typename T::absl_btree_prefer_linear_node_search>> : std::true_type
{
};
template<typename T>
struct prefers_linear_node_search<
T,
absl::void_t<typename T::absl_btree_prefer_linear_node_search>> : T::absl_btree_prefer_linear_node_search
{
};
  375. template<typename Compare, typename Key>
  376. constexpr bool compare_has_valid_result_type()
  377. {
  378. using compare_result_type = compare_result_t<Compare, Key, Key>;
  379. return std::is_same<compare_result_type, bool>::value ||
  380. std::is_convertible<compare_result_type, absl::weak_ordering>::value;
  381. }
// Value comparator for map containers: orders value_type pairs by their
// `.first` (key) members using the key comparator.
template<typename original_key_compare, typename value_type>
class map_value_compare
{
template<typename Params>
friend class btree;
// Note: this `protected` is part of the API of std::map::value_compare. See
// https://en.cppreference.com/w/cpp/container/map/value_compare.
protected:
explicit map_value_compare(original_key_compare c) :
comp(std::move(c))
{
}
original_key_compare comp; // NOLINT
public:
// Result type mirrors the key comparator's (bool or an ordering type).
auto operator()(const value_type& lhs, const value_type& rhs) const
-> decltype(comp(lhs.first, rhs.first))
{
return comp(lhs.first, rhs.first);
}
};
  402. template<typename Key, typename Compare, typename Alloc, int TargetNodeSize, bool IsMulti, bool IsMap, typename SlotPolicy>
  403. struct common_params
  404. {
  405. using original_key_compare = Compare;
  406. // If Compare is a common comparator for a string-like type, then we adapt it
  407. // to use heterogeneous lookup and to be a key-compare-to comparator.
  408. // We also adapt the comparator to diagnose invalid comparators in debug mode.
  409. // We disable this when `Compare` is invalid in a way that will cause
  410. // adaptation to fail (having invalid return type) so that we can give a
  411. // better compilation failure in static_assert_validation. If we don't do
  412. // this, then there will be cascading compilation failures that are confusing
  413. // for users.
  414. using key_compare =
  415. absl::conditional_t<!compare_has_valid_result_type<Compare, Key>(), Compare, typename key_compare_adapter<Compare, Key>::type>;
  416. static constexpr bool kIsKeyCompareStringAdapted =
  417. std::is_same<key_compare, StringBtreeDefaultLess>::value ||
  418. std::is_same<key_compare, StringBtreeDefaultGreater>::value;
  419. static constexpr bool kIsKeyCompareTransparent =
  420. IsTransparent<original_key_compare>::value ||
  421. kIsKeyCompareStringAdapted;
  422. static constexpr bool kEnableGenerations =
  423. #ifdef ABSL_BTREE_ENABLE_GENERATIONS
  424. true;
  425. #else
  426. false;
  427. #endif
  428. // A type which indicates if we have a key-compare-to functor or a plain old
  429. // key-compare functor.
  430. using is_key_compare_to = btree_is_key_compare_to<key_compare, Key>;
  431. using allocator_type = Alloc;
  432. using key_type = Key;
  433. using size_type = size_t;
  434. using difference_type = ptrdiff_t;
  435. using slot_policy = SlotPolicy;
  436. using slot_type = typename slot_policy::slot_type;
  437. using value_type = typename slot_policy::value_type;
  438. using init_type = typename slot_policy::mutable_value_type;
  439. using pointer = value_type*;
  440. using const_pointer = const value_type*;
  441. using reference = value_type&;
  442. using const_reference = const value_type&;
  443. using value_compare =
  444. absl::conditional_t<IsMap, map_value_compare<original_key_compare, value_type>, original_key_compare>;
  445. using is_map_container = std::integral_constant<bool, IsMap>;
  446. // For the given lookup key type, returns whether we can have multiple
  447. // equivalent keys in the btree. If this is a multi-container, then we can.
  448. // Otherwise, we can have multiple equivalent keys only if all of the
  449. // following conditions are met:
  450. // - The comparator is transparent.
  451. // - The lookup key type is not the same as key_type.
  452. // - The comparator is not a StringBtreeDefault{Less,Greater} comparator
  453. // that we know has the same equivalence classes for all lookup types.
template<typename LookupKey>
constexpr static bool can_have_multiple_equivalent_keys()
{
    // Multi-containers always allow duplicates. Otherwise, equivalent keys are
    // only possible for transparent heterogeneous lookups whose key type
    // differs from `key_type` (see the conditions documented above), except
    // for the string-adapted comparators, whose equivalence classes are known
    // to match across lookup types.
    return IsMulti || (IsTransparent<key_compare>::value &&
                       !std::is_same<LookupKey, Key>::value &&
                       !kIsKeyCompareStringAdapted);
}
  461. enum
  462. {
  463. kTargetNodeSize = TargetNodeSize,
  464. // Upper bound for the available space for slots. This is largest for leaf
  465. // nodes, which have overhead of at least a pointer + 4 bytes (for storing
  466. // 3 field_types and an enum).
  467. kNodeSlotSpace =
  468. TargetNodeSize - /*minimum overhead=*/(sizeof(void*) + 4),
  469. };
  470. // This is an integral type large enough to hold as many slots as will fit a
  471. // node of TargetNodeSize bytes.
  472. using node_count_type =
  473. absl::conditional_t<(kNodeSlotSpace / sizeof(slot_type) > (std::numeric_limits<uint8_t>::max)()), uint16_t, uint8_t>; // NOLINT
  474. // The following methods are necessary for passing this struct as PolicyTraits
  475. // for node_handle and/or are used within btree.
// PolicyTraits interface: returns a mutable reference to the value stored in
// `slot` (delegates to the slot policy).
static value_type& element(slot_type* slot)
{
    return slot_policy::element(slot);
}
// PolicyTraits interface: returns a const reference to the value stored in
// `slot` (delegates to the slot policy).
static const value_type& element(const slot_type* slot)
{
    return slot_policy::element(slot);
}
// PolicyTraits interface: constructs a value in `slot` from `args`, using
// `alloc` for allocator-aware construction.
template<class... Args>
static void construct(Alloc* alloc, slot_type* slot, Args&&... args)
{
    slot_policy::construct(alloc, slot, std::forward<Args>(args)...);
}
// PolicyTraits interface: constructs a value in `slot` from the slot `other`
// (exact transfer semantics are defined by the slot policy).
static void construct(Alloc* alloc, slot_type* slot, slot_type* other)
{
    slot_policy::construct(alloc, slot, other);
}
// PolicyTraits interface: destroys the value stored in `slot`.
static void destroy(Alloc* alloc, slot_type* slot)
{
    slot_policy::destroy(alloc, slot);
}
// PolicyTraits interface: transfers the value from `old_slot` into `new_slot`
// (delegates to the slot policy).
static void transfer(Alloc* alloc, slot_type* new_slot, slot_type* old_slot)
{
    slot_policy::transfer(alloc, new_slot, old_slot);
}
  501. };
  502. // An adapter class that converts a lower-bound compare into an upper-bound
  503. // compare. Note: there is no need to make a version of this adapter specialized
  504. // for key-compare-to functors because the upper-bound (the first value greater
  505. // than the input) is never an exact match.
  506. template<typename Compare>
  507. struct upper_bound_adapter
  508. {
  509. explicit upper_bound_adapter(const Compare& c) :
  510. comp(c)
  511. {
  512. }
  513. template<typename K1, typename K2>
  514. bool operator()(const K1& a, const K2& b) const
  515. {
  516. // Returns true when a is not greater than b.
  517. return !compare_internal::compare_result_as_less_than(comp(b, a));
  518. }
  519. private:
  520. Compare comp;
  521. };
// Whether a compare-to search found a key equivalent to the lookup key
// (`kEq`) or not (`kNe`).
enum class MatchKind : uint8_t
{
    kEq,
    kNe
};
// The result of a node search: `value` is the position found. When the
// comparator is a compare-to (three-way) functor, `match` additionally
// records whether the search saw an exact match.
template<typename V, bool IsCompareTo>
struct SearchResult
{
    V value;
    MatchKind match;

    // This specialization carries match information.
    static constexpr bool HasMatch()
    {
        return true;
    }
    // True iff the search found a key equivalent to the lookup key.
    bool IsEq() const
    {
        return match == MatchKind::kEq;
    }
};
  541. // When we don't use CompareTo, `match` is not present.
  542. // This ensures that callers can't use it accidentally when it provides no
  543. // useful information.
template<typename V>
struct SearchResult<V, false>
{
    // NOTE: `value` is deliberately left uninitialized by the default
    // constructor (the `{}` body keeps this ctor user-provided, so
    // value-initialization does not zero `value`).
    SearchResult()
    {
    }
    explicit SearchResult(V v) :
        value(v)
    {
    }
    // The MatchKind argument is accepted (and ignored) so callers can
    // construct either specialization uniformly.
    SearchResult(V v, MatchKind /*match*/) :
        value(v)
    {
    }

    V value;

    // This specialization never carries match information.
    static constexpr bool HasMatch()
    {
        return false;
    }
    static constexpr bool IsEq()
    {
        return false;
    }
};
  568. // A node in the btree holding. The same node type is used for both internal
  569. // and leaf nodes in the btree, though the nodes are allocated in such a way
  570. // that the children array is only valid in internal nodes.
template<typename Params>
class btree_node
{
    using is_key_compare_to = typename Params::is_key_compare_to;
    using field_type = typename Params::node_count_type;
    using allocator_type = typename Params::allocator_type;
    using slot_type = typename Params::slot_type;
    using original_key_compare = typename Params::original_key_compare;

public:
    using params_type = Params;
    using key_type = typename Params::key_type;
    using value_type = typename Params::value_type;
    using pointer = typename Params::pointer;
    using const_pointer = typename Params::const_pointer;
    using reference = typename Params::reference;
    using const_reference = typename Params::const_reference;
    using key_compare = typename Params::key_compare;
    using size_type = typename Params::size_type;
    using difference_type = typename Params::difference_type;

    // Btree decides whether to use linear node search as follows:
    // - If the comparator expresses a preference, use that.
    // - If the key expresses a preference, use that.
    // - If the key is arithmetic and the comparator is std::less or
    //   std::greater, choose linear.
    // - Otherwise, choose binary.
    // TODO(ezb): Might make sense to add condition(s) based on node-size.
    using use_linear_search = std::integral_constant<
        bool,
        has_linear_node_search_preference<original_key_compare>::value
            ? prefers_linear_node_search<original_key_compare>::value
        : has_linear_node_search_preference<key_type>::value
            ? prefers_linear_node_search<key_type>::value
            : std::is_arithmetic<key_type>::value &&
                  (std::is_same<std::less<key_type>, original_key_compare>::value ||
                   std::is_same<std::greater<key_type>, original_key_compare>::value)>;

    // This class is organized by absl::container_internal::Layout as if it had
    // the following structure:
    //   // A pointer to the node's parent.
    //   btree_node *parent;
    //
    //   // When ABSL_BTREE_ENABLE_GENERATIONS is defined, we also have a
    //   // generation integer in order to check that when iterators are
    //   // used, they haven't been invalidated already. Only the generation on
    //   // the root is used, but we have one on each node because whether a node
    //   // is root or not can change.
    //   uint32_t generation;
    //
    //   // The position of the node in the node's parent.
    //   field_type position;
    //   // The index of the first populated value in `values`.
    //   // TODO(ezb): right now, `start` is always 0. Update insertion/merge
    //   // logic to allow for floating storage within nodes.
    //   field_type start;
    //   // The index after the last populated value in `values`. Currently, this
    //   // is the same as the count of values.
    //   field_type finish;
    //   // The maximum number of values the node can hold. This is an integer in
    //   // [1, kNodeSlots] for root leaf nodes, kNodeSlots for non-root leaf
    //   // nodes, and kInternalNodeMaxCount (as a sentinel value) for internal
    //   // nodes (even though there are still kNodeSlots values in the node).
    //   // TODO(ezb): make max_count use only 4 bits and record log2(capacity)
    //   // to free extra bits for is_root, etc.
    //   field_type max_count;
    //
    //   // The array of values. The capacity is `max_count` for leaf nodes and
    //   // kNodeSlots for internal nodes. Only the values in
    //   // [start, finish) have been initialized and are valid.
    //   slot_type values[max_count];
    //
    //   // The array of child pointers. The keys in children[i] are all less
    //   // than key(i). The keys in children[i + 1] are all greater than key(i).
    //   // There are 0 children for leaf nodes and kNodeSlots + 1 children for
    //   // internal nodes.
    //   btree_node *children[kNodeSlots + 1];
    //
    // This class is only constructed by EmptyNodeType. Normally, pointers to the
    // layout above are allocated, cast to btree_node*, and de-allocated within
    // the btree implementation.
    ~btree_node() = default;
    btree_node(btree_node const&) = delete;
    btree_node& operator=(btree_node const&) = delete;

    // Public for EmptyNodeType.
    constexpr static size_type Alignment()
    {
        static_assert(LeafLayout(1).Alignment() == InternalLayout().Alignment(), "Alignment of all nodes must be equal.");
        return InternalLayout().Alignment();
    }

protected:
    btree_node() = default;

private:
    using layout_type =
        absl::container_internal::Layout<btree_node*, uint32_t, field_type, slot_type, btree_node*>;

    // Allocation size (in bytes) of a leaf node with capacity for `n` slots.
    constexpr static size_type SizeWithNSlots(size_type n)
    {
        return layout_type(
                   /*parent*/ 1,
                   /*generation*/ params_type::kEnableGenerations ? 1 : 0,
                   /*position, start, finish, max_count*/ 4,
                   /*slots*/ n,
                   /*children*/ 0
        )
            .AllocSize();
    }
    // A lower bound for the overhead of fields other than slots in a leaf node.
    constexpr static size_type MinimumOverhead()
    {
        return SizeWithNSlots(1) - sizeof(slot_type);
    }
    // Compute how many values we can fit onto a leaf node taking into account
    // padding. (Implemented as a recursive binary search over the slot count,
    // keeping the function a single-return constexpr.)
    constexpr static size_type NodeTargetSlots(const size_type begin, const size_type end)
    {
        return begin == end                                                         ? begin :
               SizeWithNSlots((begin + end) / 2 + 1) > params_type::kTargetNodeSize ? NodeTargetSlots(begin, (begin + end) / 2) :
                                                                                      NodeTargetSlots((begin + end) / 2 + 1, end);
    }

    enum
    {
        kTargetNodeSize = params_type::kTargetNodeSize,
        kNodeTargetSlots = NodeTargetSlots(0, params_type::kTargetNodeSize),
        // We need a minimum of 3 slots per internal node in order to perform
        // splitting (1 value for the two nodes involved in the split and 1 value
        // propagated to the parent as the delimiter for the split). For performance
        // reasons, we don't allow 3 slots-per-node due to bad worst case occupancy
        // of 1/3 (for a node, not a b-tree).
        kMinNodeSlots = 4,
        kNodeSlots =
            kNodeTargetSlots >= kMinNodeSlots ? kNodeTargetSlots : kMinNodeSlots,
        // The node is internal (i.e. is not a leaf node) if and only if `max_count`
        // has this value.
        kInternalNodeMaxCount = 0,
    };

    // Leaves can have less than kNodeSlots values.
    constexpr static layout_type LeafLayout(const int slot_count = kNodeSlots)
    {
        return layout_type(
            /*parent*/ 1,
            /*generation*/ params_type::kEnableGenerations ? 1 : 0,
            /*position, start, finish, max_count*/ 4,
            /*slots*/ slot_count,
            /*children*/ 0
        );
    }
    // Layout of an internal node: kNodeSlots values and kNodeSlots + 1 children.
    constexpr static layout_type InternalLayout()
    {
        return layout_type(
            /*parent*/ 1,
            /*generation*/ params_type::kEnableGenerations ? 1 : 0,
            /*position, start, finish, max_count*/ 4,
            /*slots*/ kNodeSlots,
            /*children*/ kNodeSlots + 1
        );
    }
    // Allocation size of a leaf node with the given slot capacity.
    constexpr static size_type LeafSize(const int slot_count = kNodeSlots)
    {
        return LeafLayout(slot_count).AllocSize();
    }
    // Allocation size of an internal node.
    constexpr static size_type InternalSize()
    {
        return InternalLayout().AllocSize();
    }

    // N is the index of the type in the Layout definition.
    // ElementType<N> is the Nth type in the Layout definition.
    template<size_type N>
    inline typename layout_type::template ElementType<N>* GetField()
    {
        // We assert that we don't read from values that aren't there.
        assert(N < 4 || is_internal());
        return InternalLayout().template Pointer<N>(reinterpret_cast<char*>(this));
    }
    template<size_type N>
    inline const typename layout_type::template ElementType<N>* GetField() const
    {
        assert(N < 4 || is_internal());
        return InternalLayout().template Pointer<N>(
            reinterpret_cast<const char*>(this)
        );
    }
    // Setter for the parent pointer (layout field 0).
    void set_parent(btree_node* p)
    {
        *GetField<0>() = p;
    }
    // Mutable reference to the `finish` field.
    field_type& mutable_finish()
    {
        return GetField<2>()[2];
    }
    // Pointer to the slot at index `i`.
    slot_type* slot(int i)
    {
        return &GetField<3>()[i];
    }
    // Pointer to the first populated slot.
    slot_type* start_slot()
    {
        return slot(start());
    }
    // Pointer to one past the last populated slot.
    slot_type* finish_slot()
    {
        return slot(finish());
    }
    const slot_type* slot(int i) const
    {
        return &GetField<3>()[i];
    }
    // Setters for the position/start/finish fields (layout field 2).
    void set_position(field_type v)
    {
        GetField<2>()[0] = v;
    }
    void set_start(field_type v)
    {
        GetField<2>()[1] = v;
    }
    void set_finish(field_type v)
    {
        GetField<2>()[2] = v;
    }
    // This method is only called by the node init methods.
    void set_max_count(field_type v)
    {
        GetField<2>()[3] = v;
    }

public:
    // Whether this is a leaf node or not. This value doesn't change after the
    // node is created.
    bool is_leaf() const
    {
        return GetField<2>()[3] != kInternalNodeMaxCount;
    }
    // Whether this is an internal node or not. This value doesn't change after
    // the node is created.
    bool is_internal() const
    {
        return !is_leaf();
    }
    // Getter for the position of this node in its parent.
    field_type position() const
    {
        return GetField<2>()[0];
    }
    // Getter for the offset of the first value in the `values` array.
    field_type start() const
    {
        // TODO(ezb): when floating storage is implemented, return GetField<2>()[1];
        assert(GetField<2>()[1] == 0);
        return 0;
    }
    // Getter for the offset after the last value in the `values` array.
    field_type finish() const
    {
        return GetField<2>()[2];
    }
    // Getters for the number of values stored in this node.
    field_type count() const
    {
        assert(finish() >= start());
        return finish() - start();
    }
    field_type max_count() const
    {
        // Internal nodes have max_count==kInternalNodeMaxCount.
        // Leaf nodes have max_count in [1, kNodeSlots].
        const field_type max_count = GetField<2>()[3];
        return max_count == field_type{kInternalNodeMaxCount} ? field_type{kNodeSlots} : max_count;
    }
    // Getter for the parent of this node.
    btree_node* parent() const
    {
        return *GetField<0>();
    }
    // Getter for whether the node is the root of the tree. The parent of the
    // root of the tree is the leftmost node in the tree which is guaranteed to
    // be a leaf.
    bool is_root() const
    {
        return parent()->is_leaf();
    }
    // Promotes this node to root: copies the old root's generation and adopts
    // the old root's parent (the leftmost leaf) as its own.
    void make_root()
    {
        assert(parent()->is_root());
        set_generation(parent()->generation());
        set_parent(parent()->parent());
    }
    // Gets the root node's generation integer, which is the one used by the tree.
    uint32_t* get_root_generation() const
    {
        assert(params_type::kEnableGenerations);
        const btree_node* curr = this;
        for (; !curr->is_root(); curr = curr->parent())
            continue;
        return const_cast<uint32_t*>(&curr->GetField<1>()[0]);
    }
    // Returns the generation for iterator validation.
    uint32_t generation() const
    {
        return params_type::kEnableGenerations ? *get_root_generation() : 0;
    }
    // Updates generation. Should only be called on a root node or during node
    // initialization.
    void set_generation(uint32_t generation)
    {
        if (params_type::kEnableGenerations)
            GetField<1>()[0] = generation;
    }
    // Updates the generation. We do this whenever the node is mutated.
    void next_generation()
    {
        if (params_type::kEnableGenerations)
            ++*get_root_generation();
    }

    // Getters for the key/value at position i in the node.
    const key_type& key(int i) const
    {
        return params_type::key(slot(i));
    }
    reference value(int i)
    {
        return params_type::element(slot(i));
    }
    const_reference value(int i) const
    {
        return params_type::element(slot(i));
    }

    // Getters/setter for the child at position i in the node.
    btree_node* child(int i) const
    {
        return GetField<4>()[i];
    }
    btree_node* start_child() const
    {
        return child(start());
    }
    btree_node*& mutable_child(int i)
    {
        return GetField<4>()[i];
    }
    // Poisons the child pointer at position i; it must not be read afterwards.
    void clear_child(int i)
    {
        absl::container_internal::SanitizerPoisonObject(&mutable_child(i));
    }
    // Stores child `c` at position i and records its position in `c`.
    void set_child(int i, btree_node* c)
    {
        absl::container_internal::SanitizerUnpoisonObject(&mutable_child(i));
        mutable_child(i) = c;
        c->set_position(i);
    }
    // Same as set_child, but also updates the child's parent pointer.
    void init_child(int i, btree_node* c)
    {
        set_child(i, c);
        c->set_parent(this);
    }

    // Returns the position of the first value whose key is not less than k.
    template<typename K>
    SearchResult<int, is_key_compare_to::value> lower_bound(
        const K& k, const key_compare& comp
    ) const
    {
        return use_linear_search::value ? linear_search(k, comp) : binary_search(k, comp);
    }
    // Returns the position of the first value whose key is greater than k.
    template<typename K>
    int upper_bound(const K& k, const key_compare& comp) const
    {
        auto upper_compare = upper_bound_adapter<key_compare>(comp);
        return use_linear_search::value ? linear_search(k, upper_compare).value : binary_search(k, upper_compare).value;
    }

    // Dispatches to the linear-search implementation matching the compare
    // style (plain vs compare-to) of `Compare`.
    template<typename K, typename Compare>
    SearchResult<int, btree_is_key_compare_to<Compare, key_type>::value>
    linear_search(const K& k, const Compare& comp) const
    {
        return linear_search_impl(k, start(), finish(), comp, btree_is_key_compare_to<Compare, key_type>());
    }
    // Dispatches to the binary-search implementation matching the compare
    // style (plain vs compare-to) of `Compare`.
    template<typename K, typename Compare>
    SearchResult<int, btree_is_key_compare_to<Compare, key_type>::value>
    binary_search(const K& k, const Compare& comp) const
    {
        return binary_search_impl(k, start(), finish(), comp, btree_is_key_compare_to<Compare, key_type>());
    }

    // Returns the position of the first value whose key is not less than k using
    // linear search performed using plain compare.
    template<typename K, typename Compare>
    SearchResult<int, false> linear_search_impl(
        const K& k, int s, const int e, const Compare& comp, std::false_type /* IsCompareTo */
    ) const
    {
        while (s < e)
        {
            if (!comp(key(s), k))
            {
                break;
            }
            ++s;
        }
        return SearchResult<int, false>{s};
    }
    // Returns the position of the first value whose key is not less than k using
    // linear search performed using compare-to.
    template<typename K, typename Compare>
    SearchResult<int, true> linear_search_impl(
        const K& k, int s, const int e, const Compare& comp, std::true_type /* IsCompareTo */
    ) const
    {
        while (s < e)
        {
            const absl::weak_ordering c = comp(key(s), k);
            if (c == 0)
            {
                return {s, MatchKind::kEq};
            }
            else if (c > 0)
            {
                break;
            }
            ++s;
        }
        return {s, MatchKind::kNe};
    }
    // Returns the position of the first value whose key is not less than k using
    // binary search performed using plain compare.
    template<typename K, typename Compare>
    SearchResult<int, false> binary_search_impl(
        const K& k, int s, int e, const Compare& comp, std::false_type /* IsCompareTo */
    ) const
    {
        while (s != e)
        {
            const int mid = (s + e) >> 1;
            if (comp(key(mid), k))
            {
                s = mid + 1;
            }
            else
            {
                e = mid;
            }
        }
        return SearchResult<int, false>{s};
    }
    // Returns the position of the first value whose key is not less than k using
    // binary search performed using compare-to.
    template<typename K, typename CompareTo>
    SearchResult<int, true> binary_search_impl(
        const K& k, int s, int e, const CompareTo& comp, std::true_type /* IsCompareTo */
    ) const
    {
        if (params_type::template can_have_multiple_equivalent_keys<K>())
        {
            MatchKind exact_match = MatchKind::kNe;
            while (s != e)
            {
                const int mid = (s + e) >> 1;
                const absl::weak_ordering c = comp(key(mid), k);
                if (c < 0)
                {
                    s = mid + 1;
                }
                else
                {
                    e = mid;
                    if (c == 0)
                    {
                        // Need to return the first value whose key is not less than k,
                        // which requires continuing the binary search if there could be
                        // multiple equivalent keys.
                        exact_match = MatchKind::kEq;
                    }
                }
            }
            return {s, exact_match};
        }
        else
        { // Can't have multiple equivalent keys.
            while (s != e)
            {
                const int mid = (s + e) >> 1;
                const absl::weak_ordering c = comp(key(mid), k);
                if (c < 0)
                {
                    s = mid + 1;
                }
                else if (c > 0)
                {
                    e = mid;
                }
                else
                {
                    // Exact match: with unique keys we can stop immediately.
                    return {mid, MatchKind::kEq};
                }
            }
            return {s, MatchKind::kNe};
        }
    }

    // Emplaces a value at position i, shifting all existing values and
    // children at positions >= i to the right by 1.
    template<typename... Args>
    void emplace_value(size_type i, allocator_type* alloc, Args&&... args);
    // Removes the values at positions [i, i + to_erase), shifting all existing
    // values and children after that range to the left by to_erase. Clears all
    // children between [i, i + to_erase).
    void remove_values(field_type i, field_type to_erase, allocator_type* alloc);
    // Rebalances a node with its right sibling.
    void rebalance_right_to_left(int to_move, btree_node* right, allocator_type* alloc);
    void rebalance_left_to_right(int to_move, btree_node* right, allocator_type* alloc);
    // Splits a node, moving a portion of the node's values to its right sibling.
    void split(int insert_position, btree_node* dest, allocator_type* alloc);
    // Merges a node with its right sibling, moving all of the values and the
    // delimiting key in the parent node onto itself, and deleting the src node.
    void merge(btree_node* src, allocator_type* alloc);

    // Node allocation/deletion routines.
    void init_leaf(int max_count, btree_node* parent)
    {
        set_generation(0);
        set_parent(parent);
        set_position(0);
        set_start(0);
        set_finish(0);
        set_max_count(max_count);
        absl::container_internal::SanitizerPoisonMemoryRegion(
            start_slot(), max_count * sizeof(slot_type)
        );
    }
    void init_internal(btree_node* parent)
    {
        init_leaf(kNodeSlots, parent);
        // Set `max_count` to a sentinel value to indicate that this node is
        // internal.
        set_max_count(kInternalNodeMaxCount);
        absl::container_internal::SanitizerPoisonMemoryRegion(
            &mutable_child(start()), (kNodeSlots + 1) * sizeof(btree_node*)
        );
    }
    // Returns `size` bytes of node storage to the allocator.
    static void deallocate(const size_type size, btree_node* node, allocator_type* alloc)
    {
        absl::container_internal::Deallocate<Alignment()>(alloc, node, size);
    }
    // Deletes a node and all of its children.
    static void clear_and_delete(btree_node* node, allocator_type* alloc);

private:
    // Constructs a value in slot `i` from `args` and bumps the generation.
    template<typename... Args>
    void value_init(const field_type i, allocator_type* alloc, Args&&... args)
    {
        next_generation();
        absl::container_internal::SanitizerUnpoisonObject(slot(i));
        params_type::construct(alloc, slot(i), std::forward<Args>(args)...);
    }
    // Destroys the value in slot `i` and bumps the generation.
    void value_destroy(const field_type i, allocator_type* alloc)
    {
        next_generation();
        params_type::destroy(alloc, slot(i));
        absl::container_internal::SanitizerPoisonObject(slot(i));
    }
    // Destroys the `n` values starting at slot `i`.
    void value_destroy_n(const field_type i, const field_type n, allocator_type* alloc)
    {
        next_generation();
        for (slot_type *s = slot(i), *end = slot(i + n); s != end; ++s)
        {
            params_type::destroy(alloc, s);
            absl::container_internal::SanitizerPoisonObject(s);
        }
    }
    // Transfers the value from `src` into `dest` via the slot policy; `src`
    // is poisoned afterwards and must not be read.
    static void transfer(slot_type* dest, slot_type* src, allocator_type* alloc)
    {
        absl::container_internal::SanitizerUnpoisonObject(dest);
        params_type::transfer(alloc, dest, src);
        absl::container_internal::SanitizerPoisonObject(src);
    }
    // Transfers value from slot `src_i` in `src_node` to slot `dest_i` in `this`.
    void transfer(const size_type dest_i, const size_type src_i, btree_node* src_node, allocator_type* alloc)
    {
        next_generation();
        transfer(slot(dest_i), src_node->slot(src_i), alloc);
    }
    // Transfers `n` values starting at value `src_i` in `src_node` into the
    // values starting at value `dest_i` in `this`.
    void transfer_n(const size_type n, const size_type dest_i, const size_type src_i, btree_node* src_node, allocator_type* alloc)
    {
        next_generation();
        for (slot_type *src = src_node->slot(src_i), *end = src + n, *dest = slot(dest_i);
             src != end;
             ++src, ++dest)
        {
            transfer(dest, src, alloc);
        }
    }
    // Same as above, except that we start at the end and work our way to the
    // beginning.
    void transfer_n_backward(const size_type n, const size_type dest_i, const size_type src_i, btree_node* src_node, allocator_type* alloc)
    {
        next_generation();
        for (slot_type *src = src_node->slot(src_i + n - 1), *end = src - n, *dest = slot(dest_i + n - 1);
             src != end;
             --src, --dest)
        {
            transfer(dest, src, alloc);
        }
    }

    template<typename P>
    friend class btree;
    template<typename N, typename R, typename P>
    friend class btree_iterator;
    friend class BtreeNodePeer;
    friend struct btree_access;
};
  1165. template<typename Node, typename Reference, typename Pointer>
  1166. class btree_iterator
  1167. {
  1168. using key_type = typename Node::key_type;
  1169. using size_type = typename Node::size_type;
  1170. using params_type = typename Node::params_type;
  1171. using is_map_container = typename params_type::is_map_container;
  1172. using node_type = Node;
  1173. using normal_node = typename std::remove_const<Node>::type;
  1174. using const_node = const Node;
  1175. using normal_pointer = typename params_type::pointer;
  1176. using normal_reference = typename params_type::reference;
  1177. using const_pointer = typename params_type::const_pointer;
  1178. using const_reference = typename params_type::const_reference;
  1179. using slot_type = typename params_type::slot_type;
  1180. using iterator =
  1181. btree_iterator<normal_node, normal_reference, normal_pointer>;
  1182. using const_iterator =
  1183. btree_iterator<const_node, const_reference, const_pointer>;
  1184. public:
  1185. // These aliases are public for std::iterator_traits.
  1186. using difference_type = typename Node::difference_type;
  1187. using value_type = typename params_type::value_type;
  1188. using pointer = Pointer;
  1189. using reference = Reference;
  1190. using iterator_category = std::bidirectional_iterator_tag;
  1191. btree_iterator() :
  1192. btree_iterator(nullptr, -1)
  1193. {
  1194. }
  1195. explicit btree_iterator(Node* n) :
  1196. btree_iterator(n, n->start())
  1197. {
  1198. }
  1199. btree_iterator(Node* n, int p) :
  1200. node_(n),
  1201. position_(p)
  1202. {
  1203. #ifdef ABSL_BTREE_ENABLE_GENERATIONS
  1204. // Use `~uint32_t{}` as a sentinel value for iterator generations so it
  1205. // doesn't match the initial value for the actual generation.
  1206. generation_ = n != nullptr ? n->generation() : ~uint32_t{};
  1207. #endif
  1208. }
  1209. // NOTE: this SFINAE allows for implicit conversions from iterator to
  1210. // const_iterator, but it specifically avoids hiding the copy constructor so
  1211. // that the trivial one will be used when possible.
  1212. template<typename N, typename R, typename P, absl::enable_if_t<std::is_same<btree_iterator<N, R, P>, iterator>::value && std::is_same<btree_iterator, const_iterator>::value, int> = 0>
  1213. btree_iterator(const btree_iterator<N, R, P> other) // NOLINT
  1214. :
  1215. node_(other.node_),
  1216. position_(other.position_)
  1217. {
  1218. #ifdef ABSL_BTREE_ENABLE_GENERATIONS
  1219. generation_ = other.generation_;
  1220. #endif
  1221. }
  1222. bool operator==(const iterator& other) const
  1223. {
  1224. return node_ == other.node_ && position_ == other.position_;
  1225. }
  1226. bool operator==(const const_iterator& other) const
  1227. {
  1228. return node_ == other.node_ && position_ == other.position_;
  1229. }
  1230. bool operator!=(const iterator& other) const
  1231. {
  1232. return node_ != other.node_ || position_ != other.position_;
  1233. }
  1234. bool operator!=(const const_iterator& other) const
  1235. {
  1236. return node_ != other.node_ || position_ != other.position_;
  1237. }
  1238. // Accessors for the key/value the iterator is pointing at.
  1239. reference operator*() const
  1240. {
  1241. ABSL_HARDENING_ASSERT(node_ != nullptr);
  1242. ABSL_HARDENING_ASSERT(node_->start() <= position_);
  1243. ABSL_HARDENING_ASSERT(node_->finish() > position_);
  1244. assert_valid_generation();
  1245. return node_->value(position_);
  1246. }
  1247. pointer operator->() const
  1248. {
  1249. return &operator*();
  1250. }
  1251. btree_iterator& operator++()
  1252. {
  1253. increment();
  1254. return *this;
  1255. }
  1256. btree_iterator& operator--()
  1257. {
  1258. decrement();
  1259. return *this;
  1260. }
  1261. btree_iterator operator++(int)
  1262. {
  1263. btree_iterator tmp = *this;
  1264. ++*this;
  1265. return tmp;
  1266. }
  1267. btree_iterator operator--(int)
  1268. {
  1269. btree_iterator tmp = *this;
  1270. --*this;
  1271. return tmp;
  1272. }
  1273. private:
  1274. friend iterator;
  1275. friend const_iterator;
  1276. template<typename Params>
  1277. friend class btree;
  1278. template<typename Tree>
  1279. friend class btree_container;
  1280. template<typename Tree>
  1281. friend class btree_set_container;
  1282. template<typename Tree>
  1283. friend class btree_map_container;
  1284. template<typename Tree>
  1285. friend class btree_multiset_container;
  1286. template<typename TreeType, typename CheckerType>
  1287. friend class base_checker;
  1288. friend struct btree_access;
// Explicit conversion from const_iterator to iterator.
// This SFINAE allows explicit conversions from const_iterator to
// iterator, but also avoids hiding the copy constructor.
// NOTE: the const_cast is safe because this constructor is only called by
// non-const methods and the container owns the nodes.
template<typename N, typename R, typename P, absl::enable_if_t<std::is_same<btree_iterator<N, R, P>, const_iterator>::value && std::is_same<btree_iterator, iterator>::value, int> = 0>
explicit btree_iterator(const btree_iterator<N, R, P> other) :
    node_(const_cast<node_type*>(other.node_)),
    position_(other.position_)
{
#ifdef ABSL_BTREE_ENABLE_GENERATIONS
    // Carry the generation over so invalidation checks keep working on the
    // converted iterator.
    generation_ = other.generation_;
#endif
}
// Increment/decrement the iterator.
void increment()
{
    assert_valid_generation();
    // Fast path: within a leaf, advancing is just a position bump. Note the
    // short-circuit: position_ is only incremented for leaf nodes; internal
    // nodes (and a leaf's last slot) fall through to increment_slow().
    if (node_->is_leaf() && ++position_ < node_->finish())
    {
        return;
    }
    increment_slow();
}
  1312. void increment_slow();
// Mirror of increment(): step back one value.
void decrement()
{
    assert_valid_generation();
    // Fast path: within a leaf, stepping back is just a position bump. The
    // short-circuit means position_ is only decremented for leaf nodes;
    // internal nodes (and a leaf's first slot) use decrement_slow().
    if (node_->is_leaf() && --position_ >= node_->start())
    {
        return;
    }
    decrement_slow();
}
  1322. void decrement_slow();
// Updates the generation. For use internally right before we return an
// iterator to the user. No-op unless ABSL_BTREE_ENABLE_GENERATIONS is set.
void update_generation()
{
#ifdef ABSL_BTREE_ENABLE_GENERATIONS
    if (node_ != nullptr)
        generation_ = node_->generation();
#endif
}
// Returns the key of the value the iterator currently points at.
const key_type& key() const
{
    return node_->key(position_);
}
// Returns the storage slot the iterator points at; the return type mirrors
// whatever Node::slot() yields for this node type.
decltype(std::declval<Node*>()->slot(0)) slot()
{
    return node_->slot(position_);
}
// Dies (FATAL log) if the node's generation no longer matches the one
// recorded when this iterator was created, i.e. the container was mutated.
// Compiled out unless ABSL_BTREE_ENABLE_GENERATIONS is defined.
void assert_valid_generation() const
{
#ifdef ABSL_BTREE_ENABLE_GENERATIONS
    if (node_ != nullptr && node_->generation() != generation_)
    {
        ABSL_INTERNAL_LOG(
            FATAL,
            "Attempting to use an invalidated iterator. The corresponding b-tree "
            "container has been mutated since this iterator was constructed."
        );
    }
#endif
}
  1353. // The node in the tree the iterator is pointing at.
  1354. Node* node_;
  1355. // The position within the node of the tree the iterator is pointing at.
  1356. // NOTE: this is an int rather than a field_type because iterators can point
  1357. // to invalid positions (such as -1) in certain circumstances.
  1358. int position_;
  1359. #ifdef ABSL_BTREE_ENABLE_GENERATIONS
  1360. // Used to check that the iterator hasn't been invalidated.
  1361. uint32_t generation_;
  1362. #endif
  1363. };
// The core B-tree over the key/value/comparator/allocator configuration
// supplied by `Params`. This class owns the node structure and implements
// lookup, insertion, erasure and accounting; the public STL-style containers
// wrap it via btree_container and friends (declared elsewhere in this file).
template<typename Params>
class btree
{
    using node_type = btree_node<Params>;
    using is_key_compare_to = typename Params::is_key_compare_to;
    using field_type = typename node_type::field_type;

    // We use a static empty node for the root/leftmost/rightmost of empty btrees
    // in order to avoid branching in begin()/end().
    struct alignas(node_type::Alignment()) EmptyNodeType : node_type
    {
        using field_type = typename node_type::field_type;
        // Mirrors the layout of a real node; for the empty node, `parent`
        // points back at the node itself.
        node_type* parent;
#ifdef ABSL_BTREE_ENABLE_GENERATIONS
        uint32_t generation = 0;
#endif
        field_type position = 0;
        field_type start = 0;
        field_type finish = 0;
        // max_count must be != kInternalNodeMaxCount (so that this node is regarded
        // as a leaf node). max_count() is never called when the tree is empty.
        field_type max_count = node_type::kInternalNodeMaxCount + 1;

#ifdef _MSC_VER
        // MSVC has constexpr code generations bugs here.
        EmptyNodeType() :
            parent(this)
        {
        }
#else
        constexpr EmptyNodeType(node_type* p) :
            parent(p)
        {
        }
#endif
    };

    // Returns the shared sentinel node used by all empty btrees.
    static node_type* EmptyNode()
    {
#ifdef _MSC_VER
        // Leaked intentionally; constructed once at runtime since MSVC cannot
        // build it constexpr (see EmptyNodeType above).
        static EmptyNodeType* empty_node = new EmptyNodeType;
        // This assert fails on some other construction methods.
        assert(empty_node->parent == empty_node);
        return empty_node;
#else
        // Self-referential constant: the node's parent is itself.
        static constexpr EmptyNodeType empty_node(
            const_cast<EmptyNodeType*>(&empty_node)
        );
        return const_cast<EmptyNodeType*>(&empty_node);
#endif
    }

    enum : uint32_t
    {
        kNodeSlots = node_type::kNodeSlots,
        kMinNodeValues = kNodeSlots / 2,
    };

    // Leaf/internal node counts for a subtree; returned by internal_stats().
    struct node_stats
    {
        using size_type = typename Params::size_type;

        node_stats(size_type l, size_type i) :
            leaf_nodes(l),
            internal_nodes(i)
        {
        }

        node_stats& operator+=(const node_stats& other)
        {
            leaf_nodes += other.leaf_nodes;
            internal_nodes += other.internal_nodes;
            return *this;
        }

        size_type leaf_nodes;
        size_type internal_nodes;
    };

public:
    using key_type = typename Params::key_type;
    using value_type = typename Params::value_type;
    using size_type = typename Params::size_type;
    using difference_type = typename Params::difference_type;
    using key_compare = typename Params::key_compare;
    using original_key_compare = typename Params::original_key_compare;
    using value_compare = typename Params::value_compare;
    using allocator_type = typename Params::allocator_type;
    using reference = typename Params::reference;
    using const_reference = typename Params::const_reference;
    using pointer = typename Params::pointer;
    using const_pointer = typename Params::const_pointer;
    using iterator =
        typename btree_iterator<node_type, reference, pointer>::iterator;
    using const_iterator = typename iterator::const_iterator;
    using reverse_iterator = std::reverse_iterator<iterator>;
    using const_reverse_iterator = std::reverse_iterator<const_iterator>;
    using node_handle_type = node_handle<Params, Params, allocator_type>;

    // Internal types made public for use by btree_container types.
    using params_type = Params;
    using slot_type = typename Params::slot_type;

private:
    // Copies or moves (depending on the template parameter) the values in
    // other into this btree in their order in other. This btree must be empty
    // before this method is called. This method is used in copy construction,
    // copy assignment, and move assignment.
    template<typename Btree>
    void copy_or_move_values_in_order(Btree& other);

    // Validates that various assumptions/requirements are true at compile time.
    constexpr static bool static_assert_validation();

public:
    btree(const key_compare& comp, const allocator_type& alloc) :
        root_(EmptyNode()),
        rightmost_(comp, alloc, EmptyNode()),
        size_(0)
    {
    }

    btree(const btree& other) :
        btree(other, other.allocator())
    {
    }

    btree(const btree& other, const allocator_type& alloc) :
        btree(other.key_comp(), alloc)
    {
        copy_or_move_values_in_order(other);
    }

    btree(btree&& other) noexcept
        :
        root_(absl::exchange(other.root_, EmptyNode())),
        rightmost_(std::move(other.rightmost_)),
        size_(absl::exchange(other.size_, 0))
    {
        // rightmost_ was moved wholesale (it also carries the comparator and
        // allocator), so restore other's rightmost pointer to the empty node
        // to leave it in a valid empty state.
        other.mutable_rightmost() = EmptyNode();
    }

    btree(btree&& other, const allocator_type& alloc) :
        btree(other.key_comp(), alloc)
    {
        if (alloc == other.allocator())
        {
            // Equal allocators: a pointer swap suffices.
            swap(other);
        }
        else
        {
            // Move values from `other` one at a time when allocators are different.
            copy_or_move_values_in_order(other);
        }
    }

    ~btree()
    {
        // Put static_asserts in destructor to avoid triggering them before the type
        // is complete.
        static_assert(static_assert_validation(), "This call must be elided.");
        clear();
    }

    // Assign the contents of other to *this.
    btree& operator=(const btree& other);
    btree& operator=(btree&& other) noexcept;

    // Iteration. begin() uses the leftmost node (stored as the root's
    // parent); end() points one past the last slot of the rightmost node.
    iterator begin()
    {
        return iterator(leftmost());
    }
    const_iterator begin() const
    {
        return const_iterator(leftmost());
    }
    iterator end()
    {
        return iterator(rightmost(), rightmost()->finish());
    }
    const_iterator end() const
    {
        return const_iterator(rightmost(), rightmost()->finish());
    }
    reverse_iterator rbegin()
    {
        return reverse_iterator(end());
    }
    const_reverse_iterator rbegin() const
    {
        return const_reverse_iterator(end());
    }
    reverse_iterator rend()
    {
        return reverse_iterator(begin());
    }
    const_reverse_iterator rend() const
    {
        return const_reverse_iterator(begin());
    }

    // Finds the first element whose key is not less than `key`.
    template<typename K>
    iterator lower_bound(const K& key)
    {
        return internal_end(internal_lower_bound(key).value);
    }
    template<typename K>
    const_iterator lower_bound(const K& key) const
    {
        return internal_end(internal_lower_bound(key).value);
    }

    // Finds the first element whose key is not less than `key` and also returns
    // whether that element is equal to `key`.
    template<typename K>
    std::pair<iterator, bool> lower_bound_equal(const K& key) const;

    // Finds the first element whose key is greater than `key`.
    template<typename K>
    iterator upper_bound(const K& key)
    {
        return internal_end(internal_upper_bound(key));
    }
    template<typename K>
    const_iterator upper_bound(const K& key) const
    {
        return internal_end(internal_upper_bound(key));
    }

    // Finds the range of values which compare equal to key. The first member of
    // the returned pair is equal to lower_bound(key). The second member of the
    // pair is equal to upper_bound(key).
    template<typename K>
    std::pair<iterator, iterator> equal_range(const K& key);
    template<typename K>
    std::pair<const_iterator, const_iterator> equal_range(const K& key) const
    {
        // Safe: the mutable overload does not modify the tree.
        return const_cast<btree*>(this)->equal_range(key);
    }

    // Inserts a value into the btree only if it does not already exist. The
    // boolean return value indicates whether insertion succeeded or failed.
    // Requirement: if `key` already exists in the btree, does not consume `args`.
    // Requirement: `key` is never referenced after consuming `args`.
    template<typename K, typename... Args>
    std::pair<iterator, bool> insert_unique(const K& key, Args&&... args);

    // Inserts with hint. Checks to see if the value should be placed immediately
    // before `position` in the tree. If so, then the insertion will take
    // amortized constant time. If not, the insertion will take amortized
    // logarithmic time as if a call to insert_unique() were made.
    // Requirement: if `key` already exists in the btree, does not consume `args`.
    // Requirement: `key` is never referenced after consuming `args`.
    template<typename K, typename... Args>
    std::pair<iterator, bool> insert_hint_unique(iterator position, const K& key, Args&&... args);

    // Insert a range of values into the btree.
    // Note: the first overload avoids constructing a value_type if the key
    // already exists in the btree.
    template<typename InputIterator, typename = decltype(std::declval<const key_compare&>()(params_type::key(*std::declval<InputIterator>()), std::declval<const key_type&>()))>
    void insert_iterator_unique(InputIterator b, InputIterator e, int);
    // We need the second overload for cases in which we need to construct a
    // value_type in order to compare it with the keys already in the btree.
    template<typename InputIterator>
    void insert_iterator_unique(InputIterator b, InputIterator e, char);

    // Inserts a value into the btree.
    template<typename ValueType>
    iterator insert_multi(const key_type& key, ValueType&& v);

    // Inserts a value into the btree.
    template<typename ValueType>
    iterator insert_multi(ValueType&& v)
    {
        return insert_multi(params_type::key(v), std::forward<ValueType>(v));
    }

    // Insert with hint. Check to see if the value should be placed immediately
    // before position in the tree. If it does, then the insertion will take
    // amortized constant time. If not, the insertion will take amortized
    // logarithmic time as if a call to insert_multi(v) were made.
    template<typename ValueType>
    iterator insert_hint_multi(iterator position, ValueType&& v);

    // Insert a range of values into the btree.
    template<typename InputIterator>
    void insert_iterator_multi(InputIterator b, InputIterator e);

    // Erase the specified iterator from the btree. The iterator must be valid
    // (i.e. not equal to end()). Return an iterator pointing to the node after
    // the one that was erased (or end() if none exists).
    // Requirement: does not read the value at `*iter`.
    iterator erase(iterator iter);

    // Erases range. Returns the number of keys erased and an iterator pointing
    // to the element after the last erased element.
    std::pair<size_type, iterator> erase_range(iterator begin, iterator end);

    // Finds an element with key equivalent to `key` or returns `end()` if `key`
    // is not present.
    template<typename K>
    iterator find(const K& key)
    {
        return internal_end(internal_find(key));
    }
    template<typename K>
    const_iterator find(const K& key) const
    {
        return internal_end(internal_find(key));
    }

    // Clear the btree, deleting all of the values it contains.
    void clear();

    // Swaps the contents of `this` and `other`.
    void swap(btree& other);

    // The comparator lives in slot 0 of the compressed tuple rightmost_.
    const key_compare& key_comp() const noexcept
    {
        return rightmost_.template get<0>();
    }
    template<typename K1, typename K2>
    bool compare_keys(const K1& a, const K2& b) const
    {
        // Normalizes a possibly three-way comparator result to a bool "a < b".
        return compare_internal::compare_result_as_less_than(key_comp()(a, b));
    }
    value_compare value_comp() const
    {
        return value_compare(original_key_compare(key_comp()));
    }

    // Verifies the structure of the btree.
    void verify() const;

    // Size routines.
    size_type size() const
    {
        return size_;
    }
    size_type max_size() const
    {
        // Parenthesized to dodge a potential max() macro (e.g. windows.h).
        return (std::numeric_limits<size_type>::max)();
    }
    bool empty() const
    {
        return size_ == 0;
    }

    // The height of the btree. An empty tree will have height 0.
    size_type height() const
    {
        size_type h = 0;
        if (!empty())
        {
            // Count the length of the chain from the leftmost node up to the
            // root. We actually count from the root back around to the level below
            // the root, but the calculation is the same because of the circularity
            // of that traversal.
            const node_type* n = root();
            do
            {
                ++h;
                n = n->parent();
            } while (n != root());
        }
        return h;
    }

    // The number of internal, leaf and total nodes used by the btree.
    size_type leaf_nodes() const
    {
        return internal_stats(root()).leaf_nodes;
    }
    size_type internal_nodes() const
    {
        return internal_stats(root()).internal_nodes;
    }
    size_type nodes() const
    {
        node_stats stats = internal_stats(root());
        return stats.leaf_nodes + stats.internal_nodes;
    }

    // The total number of bytes used by the btree.
    // TODO(b/169338300): update to support node_btree_*.
    size_type bytes_used() const
    {
        node_stats stats = internal_stats(root());
        if (stats.leaf_nodes == 1 && stats.internal_nodes == 0)
        {
            // A tree that is a single leaf root may be allocated smaller than
            // a full leaf, so size it by its actual max_count.
            return sizeof(*this) + node_type::LeafSize(root()->max_count());
        }
        else
        {
            return sizeof(*this) + stats.leaf_nodes * node_type::LeafSize() +
                   stats.internal_nodes * node_type::InternalSize();
        }
    }

    // The average number of bytes used per value stored in the btree assuming
    // random insertion order.
    static double average_bytes_per_value()
    {
        // The expected number of values per node with random insertion order is the
        // average of the maximum and minimum numbers of values per node.
        const double expected_values_per_node =
            (kNodeSlots + kMinNodeValues) / 2.0;
        return node_type::LeafSize() / expected_values_per_node;
    }

    // The fullness of the btree. Computed as the number of elements in the btree
    // divided by the maximum number of elements a tree with the current number
    // of nodes could hold. A value of 1 indicates perfect space
    // utilization. Smaller values indicate space wastage.
    // Returns 0 for empty trees.
    double fullness() const
    {
        if (empty())
            return 0.0;
        return static_cast<double>(size()) / (nodes() * kNodeSlots);
    }

    // The overhead of the btree structure in bytes per node. Computed as the
    // total number of bytes used by the btree minus the number of bytes used for
    // storing elements divided by the number of elements.
    // Returns 0 for empty trees.
    double overhead() const
    {
        if (empty())
            return 0.0;
        return (bytes_used() - size() * sizeof(value_type)) /
               static_cast<double>(size());
    }

    // The allocator used by the btree.
    allocator_type get_allocator() const
    {
        return allocator();
    }

private:
    friend struct btree_access;

    // Internal accessor routines.
    node_type* root()
    {
        return root_;
    }
    const node_type* root() const
    {
        return root_;
    }
    node_type*& mutable_root() noexcept
    {
        return root_;
    }
    // The rightmost node pointer lives in slot 2 of the compressed tuple.
    node_type* rightmost()
    {
        return rightmost_.template get<2>();
    }
    const node_type* rightmost() const
    {
        return rightmost_.template get<2>();
    }
    node_type*& mutable_rightmost() noexcept
    {
        return rightmost_.template get<2>();
    }
    key_compare* mutable_key_comp() noexcept
    {
        return &rightmost_.template get<0>();
    }

    // The leftmost node is stored as the parent of the root node.
    node_type* leftmost()
    {
        return root()->parent();
    }
    const node_type* leftmost() const
    {
        return root()->parent();
    }

    // Allocator routines. The allocator lives in slot 1 of rightmost_.
    allocator_type* mutable_allocator() noexcept
    {
        return &rightmost_.template get<1>();
    }
    const allocator_type& allocator() const noexcept
    {
        return rightmost_.template get<1>();
    }

    // Allocates a correctly aligned node of at least size bytes using the
    // allocator.
    node_type* allocate(const size_type size)
    {
        return reinterpret_cast<node_type*>(
            absl::container_internal::Allocate<node_type::Alignment()>(
                mutable_allocator(), size
            )
        );
    }

    // Node creation/deletion routines.
    node_type* new_internal_node(node_type* parent)
    {
        node_type* n = allocate(node_type::InternalSize());
        n->init_internal(parent);
        return n;
    }
    node_type* new_leaf_node(node_type* parent)
    {
        node_type* n = allocate(node_type::LeafSize());
        n->init_leaf(kNodeSlots, parent);
        return n;
    }
    node_type* new_leaf_root_node(const int max_count)
    {
        node_type* n = allocate(node_type::LeafSize(max_count));
        // A leaf root is its own parent.
        n->init_leaf(max_count, /*parent=*/n);
        return n;
    }

    // Deletion helper routines.
    iterator rebalance_after_delete(iterator iter);

    // Rebalances or splits the node iter points to.
    void rebalance_or_split(iterator* iter);

    // Merges the values of left, right and the delimiting key on their parent
    // onto left, removing the delimiting key and deleting right.
    void merge_nodes(node_type* left, node_type* right);

    // Tries to merge node with its left or right sibling, and failing that,
    // rebalance with its left or right sibling. Returns true if a merge
    // occurred, at which point it is no longer valid to access node. Returns
    // false if no merging took place.
    bool try_merge_or_rebalance(iterator* iter);

    // Tries to shrink the height of the tree by 1.
    void try_shrink();

    // Maps an internal "not found" iterator (null node) to end().
    iterator internal_end(iterator iter)
    {
        return iter.node_ != nullptr ? iter : end();
    }
    const_iterator internal_end(const_iterator iter) const
    {
        return iter.node_ != nullptr ? iter : end();
    }

    // Emplaces a value into the btree immediately before iter. Requires that
    // key(v) <= iter.key() and (--iter).key() <= key(v).
    template<typename... Args>
    iterator internal_emplace(iterator iter, Args&&... args);

    // Returns an iterator pointing to the first value >= the value "iter" is
    // pointing at. Note that "iter" might be pointing to an invalid location such
    // as iter.position_ == iter.node_->finish(). This routine simply moves iter
    // up in the tree to a valid location. Requires: iter.node_ is non-null.
    template<typename IterType>
    static IterType internal_last(IterType iter);

    // Returns an iterator pointing to the leaf position at which key would
    // reside in the tree, unless there is an exact match - in which case, the
    // result may not be on a leaf. When there's a three-way comparator, we can
    // return whether there was an exact match. This allows the caller to avoid a
    // subsequent comparison to determine if an exact match was made, which is
    // important for keys with expensive comparison, such as strings.
    template<typename K>
    SearchResult<iterator, is_key_compare_to::value> internal_locate(
        const K& key
    ) const;

    // Internal routine which implements lower_bound().
    template<typename K>
    SearchResult<iterator, is_key_compare_to::value> internal_lower_bound(
        const K& key
    ) const;

    // Internal routine which implements upper_bound().
    template<typename K>
    iterator internal_upper_bound(const K& key) const;

    // Internal routine which implements find().
    template<typename K>
    iterator internal_find(const K& key) const;

    // Verifies the tree structure of node.
    int internal_verify(const node_type* node, const key_type* lo, const key_type* hi) const;

    // Recursively counts leaf and internal nodes in the subtree at `node`.
    node_stats internal_stats(const node_type* node) const
    {
        // The root can be a static empty node.
        if (node == nullptr || (node == root() && empty()))
        {
            return node_stats(0, 0);
        }
        if (node->is_leaf())
        {
            return node_stats(1, 0);
        }
        // An internal node's children occupy indices [start, finish]
        // inclusive, hence the <= bound below.
        node_stats res(0, 1);
        for (int i = node->start(); i <= node->finish(); ++i)
        {
            res += internal_stats(node->child(i));
        }
        return res;
    }

    node_type* root_;
    // A pointer to the rightmost node. Note that the leftmost node is stored as
    // the root's parent. We use compressed tuple in order to save space because
    // key_compare and allocator_type are usually empty.
    absl::container_internal::CompressedTuple<key_compare, allocator_type, node_type*>
        rightmost_;
    // Number of values.
    size_type size_;
};
  1918. ////
  1919. // btree_node methods
// Constructs a value from `args` at index `i` of this node, shifting existing
// values (and, for internal nodes, child pointers) one slot to the right to
// make room.
template<typename P>
template<typename... Args>
inline void btree_node<P>::emplace_value(const size_type i, allocator_type* alloc, Args&&... args)
{
    assert(i >= start());
    assert(i <= finish());
    // Shift old values to create space for new value and then construct it in
    // place.
    if (i < finish())
    {
        transfer_n_backward(finish() - i, /*dest_i=*/i + 1, /*src_i=*/i, this, alloc);
    }
    value_init(i, alloc, std::forward<Args>(args)...);
    set_finish(finish() + 1);
    // For internal nodes, also shift the child pointers after the insertion
    // point right by one, leaving slot i+1 cleared; the caller is expected to
    // fill it in (see split(), which calls init_child() afterwards).
    if (is_internal() && finish() > i + 1)
    {
        for (field_type j = finish(); j > i + 1; --j)
        {
            set_child(j, child(j - 1));
        }
        clear_child(i + 1);
    }
}
// Removes `to_erase` values starting at index `i`, destroying them and
// compacting the remaining values (and children, for internal nodes) leftward.
template<typename P>
inline void btree_node<P>::remove_values(const field_type i, const field_type to_erase, allocator_type* alloc)
{
    // Destroy the erased values, then transfer the values after the removed
    // range into their new places.
    value_destroy_n(i, to_erase, alloc);
    const field_type orig_finish = finish();
    const field_type src_i = i + to_erase;
    transfer_n(orig_finish - src_i, i, src_i, this, alloc);
    if (is_internal())
    {
        // Delete all children between begin and end.
        for (int j = 0; j < to_erase; ++j)
        {
            clear_and_delete(child(i + j + 1), alloc);
        }
        // Rotate children after end into new positions.
        for (int j = i + to_erase + 1; j <= orig_finish; ++j)
        {
            set_child(j - to_erase, child(j));
            clear_child(j);
        }
    }
    set_finish(orig_finish - to_erase);
}
// Moves `to_move` values from `right` (this node's immediate right sibling)
// into this node, rotating through the delimiting value held by the parent.
template<typename P>
void btree_node<P>::rebalance_right_to_left(const int to_move, btree_node* right, allocator_type* alloc)
{
    assert(parent() == right->parent());
    assert(position() + 1 == right->position());
    assert(right->count() >= count());
    assert(to_move >= 1);
    assert(to_move <= right->count());
    // 1) Move the delimiting value in the parent to the left node.
    transfer(finish(), position(), parent(), alloc);
    // 2) Move the (to_move - 1) values from the right node to the left node.
    transfer_n(to_move - 1, finish() + 1, right->start(), right, alloc);
    // 3) Move the new delimiting value to the parent from the right node.
    parent()->transfer(position(), right->start() + to_move - 1, right, alloc);
    // 4) Shift the values in the right node to their correct positions.
    right->transfer_n(right->count() - to_move, right->start(), right->start() + to_move, right, alloc);
    if (is_internal())
    {
        // Move the child pointers from the right to the left node.
        for (int i = 0; i < to_move; ++i)
        {
            init_child(finish() + i + 1, right->child(i));
        }
        // Compact the right node's remaining children leftward.
        for (int i = right->start(); i <= right->finish() - to_move; ++i)
        {
            assert(i + to_move <= right->max_count());
            right->init_child(i, right->child(i + to_move));
            right->clear_child(i + to_move);
        }
    }
    // Fixup `finish` on the left and right nodes.
    set_finish(finish() + to_move);
    right->set_finish(right->finish() - to_move);
}
// Moves `to_move` values from this node into `right` (this node's immediate
// right sibling), rotating through the delimiting value held by the parent.
template<typename P>
void btree_node<P>::rebalance_left_to_right(const int to_move, btree_node* right, allocator_type* alloc)
{
    assert(parent() == right->parent());
    assert(position() + 1 == right->position());
    assert(count() >= right->count());
    assert(to_move >= 1);
    assert(to_move <= count());
    // Values in the right node are shifted to the right to make room for the
    // new to_move values. Then, the delimiting value in the parent and the
    // other (to_move - 1) values in the left node are moved into the right node.
    // Lastly, a new delimiting value is moved from the left node into the
    // parent, and the remaining empty left node entries are destroyed.
    // 1) Shift existing values in the right node to their correct positions.
    right->transfer_n_backward(right->count(), right->start() + to_move, right->start(), right, alloc);
    // 2) Move the delimiting value in the parent to the right node.
    right->transfer(right->start() + to_move - 1, position(), parent(), alloc);
    // 3) Move the (to_move - 1) values from the left node to the right node.
    right->transfer_n(to_move - 1, right->start(), finish() - (to_move - 1), this, alloc);
    // 4) Move the new delimiting value to the parent from the left node.
    parent()->transfer(position(), finish() - to_move, this, alloc);
    if (is_internal())
    {
        // Move the child pointers from the left to the right node.
        // Shift right's children right by to_move (backwards to avoid
        // overwriting), then move the last to_move children of the left node
        // into the vacated slots.
        for (int i = right->finish(); i >= right->start(); --i)
        {
            right->init_child(i + to_move, right->child(i));
            right->clear_child(i);
        }
        for (int i = 1; i <= to_move; ++i)
        {
            right->init_child(i - 1, child(finish() - to_move + i));
            clear_child(finish() - to_move + i);
        }
    }
    // Fixup the counts on the left and right nodes.
    set_finish(finish() - to_move);
    right->set_finish(right->finish() + to_move);
}
// Splits this (full) node, moving a portion of its values into `dest` (a
// fresh empty sibling on the right) and pushing the new delimiting value up
// into the parent. The split point is biased by `insert_position` so the
// pending insertion lands in the emptier node.
template<typename P>
void btree_node<P>::split(const int insert_position, btree_node* dest, allocator_type* alloc)
{
    assert(dest->count() == 0);
    assert(max_count() == kNodeSlots);
    // We bias the split based on the position being inserted. If we're
    // inserting at the beginning of the left node then bias the split to put
    // more values on the right node. If we're inserting at the end of the
    // right node then bias the split to put more values on the left node.
    if (insert_position == start())
    {
        dest->set_finish(dest->start() + finish() - 1);
    }
    else if (insert_position == kNodeSlots)
    {
        dest->set_finish(dest->start());
    }
    else
    {
        dest->set_finish(dest->start() + count() / 2);
    }
    set_finish(finish() - dest->count());
    assert(count() >= 1);
    // Move values from the left sibling to the right sibling.
    dest->transfer_n(dest->count(), dest->start(), finish(), this, alloc);
    // The split key is the largest value in the left sibling.
    --mutable_finish();
    parent()->emplace_value(position(), alloc, finish_slot());
    value_destroy(finish(), alloc);
    // Hook dest in as the child to the right of the new delimiting value.
    parent()->init_child(position() + 1, dest);
    if (is_internal())
    {
        // Hand the children that followed the moved values over to dest.
        for (int i = dest->start(), j = finish() + 1; i <= dest->finish();
             ++i, ++j)
        {
            assert(child(j) != nullptr);
            dest->init_child(i, child(j));
            clear_child(j);
        }
    }
}
// Merges `src` (this node's immediate right sibling) into this node: the
// delimiting value comes down from the parent, src's values and children move
// over, and src is removed/deleted via the parent's remove_values().
template<typename P>
void btree_node<P>::merge(btree_node* src, allocator_type* alloc)
{
    assert(parent() == src->parent());
    assert(position() + 1 == src->position());
    // Move the delimiting value to the left node.
    value_init(finish(), alloc, parent()->slot(position()));
    // Move the values from the right to the left node.
    transfer_n(src->count(), finish() + 1, src->start(), src, alloc);
    if (is_internal())
    {
        // Move the child pointers from the right to the left node.
        for (int i = src->start(), j = finish() + 1; i <= src->finish(); ++i, ++j)
        {
            init_child(j, src->child(i));
            src->clear_child(i);
        }
    }
    // Fixup `finish` on the src and dest nodes.
    set_finish(start() + 1 + count() + src->count());
    src->set_finish(src->start());
    // Remove the value on the parent node and delete the src node.
    parent()->remove_values(position(), /*to_erase=*/1, alloc);
}
// Destroys all values in, and deallocates every node of, the subtree rooted
// at `node`. Runs iteratively (no recursion): it descends to the leftmost
// leaf, deletes leaves left-to-right under each parent, then deletes each
// emptied parent and climbs/moves right, stopping once it returns to the
// parent of the subtree root.
  2105. template<typename P>
  2106. void btree_node<P>::clear_and_delete(btree_node* node, allocator_type* alloc)
  2107. {
// Fast path: a lone leaf is destroyed and freed directly.
  2108. if (node->is_leaf())
  2109. {
  2110. node->value_destroy_n(node->start(), node->count(), alloc);
  2111. deallocate(LeafSize(node->max_count()), node, alloc);
  2112. return;
  2113. }
// An internal node with zero values has no children to visit.
  2114. if (node->count() == 0)
  2115. {
  2116. deallocate(InternalSize(), node, alloc);
  2117. return;
  2118. }
  2119. // The parent of the root of the subtree we are deleting.
  2120. btree_node* delete_root_parent = node->parent();
  2121. // Navigate to the leftmost leaf under node, and then delete upwards.
  2122. while (node->is_internal())
  2123. node = node->start_child();
  2124. #ifdef ABSL_BTREE_ENABLE_GENERATIONS
  2125. // When generations are enabled, we delete the leftmost leaf last in case it's
  2126. // the parent of the root and we need to check whether it's a leaf before we
  2127. // can update the root's generation.
  2128. // TODO(ezb): if we change btree_node::is_root to check a bool inside the node
  2129. // instead of checking whether the parent is a leaf, we can remove this logic.
  2130. btree_node* leftmost_leaf = node;
  2131. #endif
  2132. // Use `int` because `pos` needs to be able to hold `kNodeSlots+1`, which
  2133. // isn't guaranteed to be a valid `field_type`.
  2134. int pos = node->position();
  2135. btree_node* parent = node->parent();
  2136. for (;;)
  2137. {
  2138. // In each iteration of the next loop, we delete one leaf node and go right.
  2139. assert(pos <= parent->finish());
  2140. do
  2141. {
  2142. node = parent->child(pos);
  2143. if (node->is_internal())
  2144. {
  2145. // Navigate to the leftmost leaf under node.
  2146. while (node->is_internal())
  2147. node = node->start_child();
  2148. pos = node->position();
  2149. parent = node->parent();
  2150. }
  2151. node->value_destroy_n(node->start(), node->count(), alloc);
  2152. #ifdef ABSL_BTREE_ENABLE_GENERATIONS
  2153. if (leftmost_leaf != node)
  2154. #endif
  2155. deallocate(LeafSize(node->max_count()), node, alloc);
  2156. ++pos;
  2157. } while (pos <= parent->finish());
  2158. // Once we've deleted all children of parent, delete parent and go up/right.
  2159. assert(pos > parent->finish());
  2160. do
  2161. {
  2162. node = parent;
  2163. pos = node->position();
  2164. parent = node->parent();
  2165. node->value_destroy_n(node->start(), node->count(), alloc);
  2166. deallocate(InternalSize(), node, alloc);
// Reached the parent of the subtree root: the whole subtree is gone.
  2167. if (parent == delete_root_parent)
  2168. {
  2169. #ifdef ABSL_BTREE_ENABLE_GENERATIONS
  2170. deallocate(LeafSize(leftmost_leaf->max_count()), leftmost_leaf, alloc);
  2171. #endif
  2172. return;
  2173. }
  2174. ++pos;
  2175. } while (pos > parent->finish());
  2176. }
  2177. }
  2178. ////
  2179. // btree_iterator methods
// Slow path of iterator increment. Two cases: (1) a leaf whose values are
// exhausted — climb to the nearest ancestor that still has a value to the
// right; (2) an internal node — descend into the right subtree's leftmost
// leaf.
  2180. template<typename N, typename R, typename P>
  2181. void btree_iterator<N, R, P>::increment_slow()
  2182. {
  2183. if (node_->is_leaf())
  2184. {
  2185. assert(position_ >= node_->finish());
  2186. btree_iterator save(*this);
  2187. while (position_ == node_->finish() && !node_->is_root())
  2188. {
  2189. assert(node_->parent()->child(node_->position()) == node_);
  2190. position_ = node_->position();
  2191. node_ = node_->parent();
  2192. }
  2193. // TODO(ezb): assert we aren't incrementing end() instead of handling.
// Climbed to the root without finding a successor: we were already at the
// last value, so restore the saved (end) iterator unchanged.
  2194. if (position_ == node_->finish())
  2195. {
  2196. *this = save;
  2197. }
  2198. }
  2199. else
  2200. {
  2201. assert(position_ < node_->finish());
  2202. node_ = node_->child(position_ + 1);
  2203. while (node_->is_internal())
  2204. {
  2205. node_ = node_->start_child();
  2206. }
  2207. position_ = node_->start();
  2208. }
  2209. }
// Slow path of iterator decrement — mirror image of increment_slow(): climb
// out of a leaf when stepping before its first value, or descend into the
// rightmost leaf of the left subtree from an internal node.
  2210. template<typename N, typename R, typename P>
  2211. void btree_iterator<N, R, P>::decrement_slow()
  2212. {
  2213. if (node_->is_leaf())
  2214. {
  2215. assert(position_ <= -1);
  2216. btree_iterator save(*this);
  2217. while (position_ < node_->start() && !node_->is_root())
  2218. {
  2219. assert(node_->parent()->child(node_->position()) == node_);
  2220. position_ = node_->position() - 1;
  2221. node_ = node_->parent();
  2222. }
  2223. // TODO(ezb): assert we aren't decrementing begin() instead of handling.
// Climbed to the root without finding a predecessor: restore the saved
// iterator unchanged.
  2224. if (position_ < node_->start())
  2225. {
  2226. *this = save;
  2227. }
  2228. }
  2229. else
  2230. {
  2231. assert(position_ >= node_->start());
  2232. node_ = node_->child(position_);
  2233. while (node_->is_internal())
  2234. {
  2235. node_ = node_->child(node_->finish());
  2236. }
  2237. position_ = node_->finish() - 1;
  2238. }
  2239. }
  2240. ////
  2241. // btree methods
// Bulk-loads `other`'s values into this (empty) tree in sorted order,
// appending each one at end() so no key comparisons are needed.
// `iter.slot()` hands each source slot to the insert path; presumably values
// are moved from a non-const `Btree` and copied from a const one — confirm
// against the slot/transfer helpers.
  2242. template<typename P>
  2243. template<typename Btree>
  2244. void btree<P>::copy_or_move_values_in_order(Btree& other)
  2245. {
  2246. static_assert(std::is_same<btree, Btree>::value || std::is_same<const btree, Btree>::value, "Btree type must be same or const.");
  2247. assert(empty());
  2248. // We can avoid key comparisons because we know the order of the
  2249. // values is the same order we'll store them in.
  2250. auto iter = other.begin();
  2251. if (iter == other.end())
  2252. return;
// The first value must create the root; insert_multi handles that setup.
  2253. insert_multi(iter.slot());
  2254. ++iter;
  2255. for (; iter != other.end(); ++iter)
  2256. {
  2257. // If the btree is not empty, we can just insert the new value at the end
  2258. // of the tree.
  2259. internal_emplace(end(), iter.slot());
  2260. }
  2261. }
// Compile-time sanity checks on the node layout, the comparator, and the
// allocator. Always returns true so it can be invoked from a static_assert
// or constexpr context.
  2262. template<typename P>
  2263. constexpr bool btree<P>::static_assert_validation()
  2264. {
  2265. static_assert(std::is_nothrow_copy_constructible<key_compare>::value, "Key comparison must be nothrow copy constructible");
  2266. static_assert(std::is_nothrow_copy_constructible<allocator_type>::value, "Allocator must be nothrow copy constructible");
  2267. static_assert(type_traits_internal::is_trivially_copyable<iterator>::value, "iterator not trivially copyable.");
  2268. // Note: We assert that kTargetValues, which is computed from
  2269. // Params::kTargetNodeSize, must fit the node_type::field_type.
  2270. static_assert(
  2271. kNodeSlots < (1 << (8 * sizeof(typename node_type::field_type))),
  2272. "target node size too large"
  2273. );
  2274. // Verify that key_compare returns an absl::{weak,strong}_ordering or bool.
  2275. static_assert(
  2276. compare_has_valid_result_type<key_compare, key_type>(),
  2277. "key comparison function must return absl::{weak,strong}_ordering or "
  2278. "bool."
  2279. );
  2280. // Test the assumption made in setting kNodeSlotSpace.
  2281. static_assert(node_type::MinimumOverhead() >= sizeof(void*) + 4, "node space assumption incorrect");
  2282. return true;
  2283. }
// Returns the lower-bound iterator for `key` together with whether the
// element at that position is equivalent to `key`. When the search already
// produced a match verdict (compare-to comparators), it is reused; otherwise
// one extra comparison against the found key decides equality.
  2284. template<typename P>
  2285. template<typename K>
  2286. auto btree<P>::lower_bound_equal(const K& key) const
  2287. -> std::pair<iterator, bool>
  2288. {
  2289. const SearchResult<iterator, is_key_compare_to::value> res =
  2290. internal_lower_bound(key);
  2291. const iterator lower = iterator(internal_end(res.value));
  2292. const bool equal = res.HasMatch() ? res.IsEq() : lower != end() && !compare_keys(key, lower.key());
  2293. return {lower, equal};
  2294. }
// Returns the [first, last) range of elements equivalent to `key`. Optimized
// to avoid the second full-tree walk (upper_bound) whenever the container or
// the data guarantees at most one equivalent element.
  2295. template<typename P>
  2296. template<typename K>
  2297. auto btree<P>::equal_range(const K& key) -> std::pair<iterator, iterator>
  2298. {
  2299. const std::pair<iterator, bool> lower_and_equal = lower_bound_equal(key);
  2300. const iterator lower = lower_and_equal.first;
// No equivalent element at all: empty range at the lower bound.
  2301. if (!lower_and_equal.second)
  2302. {
  2303. return {lower, lower};
  2304. }
  2305. const iterator next = std::next(lower);
  2306. if (!params_type::template can_have_multiple_equivalent_keys<K>())
  2307. {
  2308. // The next iterator after lower must point to a key greater than `key`.
  2309. // Note: if this assert fails, then it may indicate that the comparator does
  2310. // not meet the equivalence requirements for Compare
  2311. // (see https://en.cppreference.com/w/cpp/named_req/Compare).
  2312. assert(next == end() || compare_keys(key, next.key()));
  2313. return {lower, next};
  2314. }
  2315. // Try once more to avoid the call to upper_bound() if there's only one
  2316. // equivalent key. This should prevent all calls to upper_bound() in cases of
  2317. // unique-containers with heterogeneous comparators in which all comparison
  2318. // operators have the same equivalence classes.
  2319. if (next == end() || compare_keys(key, next.key()))
  2320. return {lower, next};
  2321. // In this case, we need to call upper_bound() to avoid worst case O(N)
  2322. // behavior if we were to iterate over equal keys.
  2323. return {lower, upper_bound(key)};
  2324. }
// Inserts a value constructed from `args` if no element equivalent to `key`
// exists. Returns {iterator to the element, whether an insertion happened}.
  2325. template<typename P>
  2326. template<typename K, typename... Args>
  2327. auto btree<P>::insert_unique(const K& key, Args&&... args)
  2328. -> std::pair<iterator, bool>
  2329. {
// Lazily create the first leaf/root on first insertion.
  2330. if (empty())
  2331. {
  2332. mutable_root() = mutable_rightmost() = new_leaf_root_node(1);
  2333. }
  2334. SearchResult<iterator, is_key_compare_to::value> res = internal_locate(key);
  2335. iterator iter = res.value;
  2336. if (res.HasMatch())
  2337. {
  2338. if (res.IsEq())
  2339. {
  2340. // The key already exists in the tree, do nothing.
  2341. return {iter, false};
  2342. }
  2343. }
  2344. else
  2345. {
// Boolean-comparator case: the descent couldn't record equality, so check
// the element preceding the insertion point for equivalence explicitly.
  2346. iterator last = internal_last(iter);
  2347. if (last.node_ && !compare_keys(key, last.key()))
  2348. {
  2349. // The key already exists in the tree, do nothing.
  2350. return {last, false};
  2351. }
  2352. }
  2353. return {internal_emplace(iter, std::forward<Args>(args)...), true};
  2354. }
// Hinted unique insertion: O(1) when `position` (or its immediate successor)
// is the correct insertion point for `key`; otherwise falls back to a full
// insert_unique() search from the root.
  2355. template<typename P>
  2356. template<typename K, typename... Args>
  2357. inline auto btree<P>::insert_hint_unique(iterator position, const K& key, Args&&... args)
  2358. -> std::pair<iterator, bool>
  2359. {
  2360. if (!empty())
  2361. {
  2362. if (position == end() || compare_keys(key, position.key()))
  2363. {
  2364. if (position == begin() || compare_keys(std::prev(position).key(), key))
  2365. {
  2366. // prev.key() < key < position.key()
  2367. return {internal_emplace(position, std::forward<Args>(args)...), true};
  2368. }
  2369. }
  2370. else if (compare_keys(position.key(), key))
  2371. {
  2372. ++position;
  2373. if (position == end() || compare_keys(key, position.key()))
  2374. {
  2375. // {original `position`}.key() < key < {current `position`}.key()
  2376. return {internal_emplace(position, std::forward<Args>(args)...), true};
  2377. }
  2378. }
  2379. else
  2380. {
  2381. // position.key() == key
  2382. return {position, false};
  2383. }
  2384. }
// Hint was wrong (or the tree is empty): do the full search.
  2385. return insert_unique(key, std::forward<Args>(args)...);
  2386. }
  2387. template<typename P>
  2388. template<typename InputIterator, typename>
  2389. void btree<P>::insert_iterator_unique(InputIterator b, InputIterator e, int)
  2390. {
  2391. for (; b != e; ++b)
  2392. {
  2393. insert_hint_unique(end(), params_type::key(*b), *b);
  2394. }
  2395. }
// Range insert overload (selected by tag dispatch — note the `char` tag)
// for iterators whose referenced value can't be forwarded directly:
// each element is first materialized into a temporary slot owned by a node
// handle, then handed to the hinted insert by slot pointer.
  2396. template<typename P>
  2397. template<typename InputIterator>
  2398. void btree<P>::insert_iterator_unique(InputIterator b, InputIterator e, char)
  2399. {
  2400. for (; b != e; ++b)
  2401. {
  2402. // Use a node handle to manage a temp slot.
  2403. auto node_handle =
  2404. CommonAccess::Construct<node_handle_type>(get_allocator(), *b);
  2405. slot_type* slot = CommonAccess::GetSlot(node_handle);
  2406. insert_hint_unique(end(), params_type::key(slot), slot);
  2407. }
  2408. }
// Inserts `v` under `key`, allowing duplicates. The new element is placed at
// the upper bound, i.e. after any existing equivalent elements.
  2409. template<typename P>
  2410. template<typename ValueType>
  2411. auto btree<P>::insert_multi(const key_type& key, ValueType&& v) -> iterator
  2412. {
// Lazily create the first leaf/root on first insertion.
  2413. if (empty())
  2414. {
  2415. mutable_root() = mutable_rightmost() = new_leaf_root_node(1);
  2416. }
  2417. iterator iter = internal_upper_bound(key);
// A null node means the key is greater than everything: insert at end().
  2418. if (iter.node_ == nullptr)
  2419. {
  2420. iter = end();
  2421. }
  2422. return internal_emplace(iter, std::forward<ValueType>(v));
  2423. }
// Hinted insertion for multi-containers: O(1) when `position` (or its
// successor) is a valid spot for `key`; otherwise falls back to a full
// insert_multi() search.
  2424. template<typename P>
  2425. template<typename ValueType>
  2426. auto btree<P>::insert_hint_multi(iterator position, ValueType&& v) -> iterator
  2427. {
  2428. if (!empty())
  2429. {
  2430. const key_type& key = params_type::key(v);
  2431. if (position == end() || !compare_keys(position.key(), key))
  2432. {
  2433. if (position == begin() ||
  2434. !compare_keys(key, std::prev(position).key()))
  2435. {
  2436. // prev.key() <= key <= position.key()
  2437. return internal_emplace(position, std::forward<ValueType>(v));
  2438. }
  2439. }
  2440. else
  2441. {
  2442. ++position;
  2443. if (position == end() || !compare_keys(position.key(), key))
  2444. {
  2445. // {original `position`}.key() < key < {current `position`}.key()
  2446. return internal_emplace(position, std::forward<ValueType>(v));
  2447. }
  2448. }
  2449. }
// Hint was wrong (or the tree is empty): do the full search.
  2450. return insert_multi(std::forward<ValueType>(v));
  2451. }
  2452. template<typename P>
  2453. template<typename InputIterator>
  2454. void btree<P>::insert_iterator_multi(InputIterator b, InputIterator e)
  2455. {
  2456. for (; b != e; ++b)
  2457. {
  2458. insert_hint_multi(end(), *b);
  2459. }
  2460. }
// Copy assignment: clears this tree, copies the comparator (and the
// allocator, when POCCA says to propagate), then bulk-copies other's values
// in order.
  2461. template<typename P>
  2462. auto btree<P>::operator=(const btree& other) -> btree&
  2463. {
  2464. if (this != &other)
  2465. {
  2466. clear();
  2467. *mutable_key_comp() = other.key_comp();
  2468. if (absl::allocator_traits<
  2469. allocator_type>::propagate_on_container_copy_assignment::value)
  2470. {
  2471. *mutable_allocator() = other.allocator();
  2472. }
  2473. copy_or_move_values_in_order(other);
  2474. }
  2475. return *this;
  2476. }
// Move assignment. Three cases: the allocator propagates (steal the whole
// representation), allocators compare equal (swap representations), or
// neither (element-wise move using other's comparator).
// NOTE(review): this tests propagate_on_container_copy_assignment even
// though it is the *move* assignment operator — confirm whether
// propagate_on_container_move_assignment was intended; the two traits agree
// for most allocators, which would mask a mix-up.
  2477. template<typename P>
  2478. auto btree<P>::operator=(btree&& other) noexcept -> btree&
  2479. {
  2480. if (this != &other)
  2481. {
  2482. clear();
  2483. using std::swap;
  2484. if (absl::allocator_traits<
  2485. allocator_type>::propagate_on_container_copy_assignment::value)
  2486. {
  2487. swap(root_, other.root_);
  2488. // Note: `rightmost_` also contains the allocator and the key comparator.
  2489. swap(rightmost_, other.rightmost_);
  2490. swap(size_, other.size_);
  2491. }
  2492. else
  2493. {
  2494. if (allocator() == other.allocator())
  2495. {
  2496. swap(mutable_root(), other.mutable_root());
  2497. swap(*mutable_key_comp(), *other.mutable_key_comp());
  2498. swap(mutable_rightmost(), other.mutable_rightmost());
  2499. swap(size_, other.size_);
  2500. }
  2501. else
  2502. {
  2503. // We aren't allowed to propagate the allocator and the allocator is
  2504. // different so we can't take over its memory. We must move each element
  2505. // individually. We need both `other` and `this` to have `other`s key
  2506. // comparator while moving the values so we can't swap the key
  2507. // comparators.
  2508. *mutable_key_comp() = other.key_comp();
  2509. copy_or_move_values_in_order(other);
  2510. }
  2511. }
  2512. }
  2513. return *this;
  2514. }
// Erases the element at `iter`, rebalancing as needed, and returns an
// iterator to the element that followed the erased one. Internal-node
// deletions are reduced to leaf deletions by pulling up the predecessor.
  2515. template<typename P>
  2516. auto btree<P>::erase(iterator iter) -> iterator
  2517. {
  2518. iter.node_->value_destroy(iter.position_, mutable_allocator());
  2519. iter.update_generation();
  2520. const bool internal_delete = iter.node_->is_internal();
  2521. if (internal_delete)
  2522. {
  2523. // Deletion of a value on an internal node. First, transfer the largest
  2524. // value from our left child here, then erase/rebalance from that position.
  2525. // We can get to the largest value from our left child by decrementing iter.
  2526. iterator internal_iter(iter);
  2527. --iter;
  2528. assert(iter.node_->is_leaf());
  2529. internal_iter.node_->transfer(internal_iter.position_, iter.position_, iter.node_, mutable_allocator());
  2530. }
  2531. else
  2532. {
  2533. // Shift values after erased position in leaf. In the internal case, we
  2534. // don't need to do this because the leaf position is the end of the node.
  2535. const field_type transfer_from = iter.position_ + 1;
  2536. const field_type num_to_transfer = iter.node_->finish() - transfer_from;
  2537. iter.node_->transfer_n(num_to_transfer, iter.position_, transfer_from, iter.node_, mutable_allocator());
  2538. }
  2539. // Update node finish and container size.
  2540. iter.node_->set_finish(iter.node_->finish() - 1);
  2541. --size_;
  2542. // We want to return the next value after the one we just erased. If we
  2543. // erased from an internal node (internal_delete == true), then the next
  2544. // value is ++(++iter). If we erased from a leaf node (internal_delete ==
  2545. // false) then the next value is ++iter. Note that ++iter may point to an
  2546. // internal node and the value in the internal node may move to a leaf node
  2547. // (iter.node_) when rebalancing is performed at the leaf level.
  2548. iterator res = rebalance_after_delete(iter);
  2549. // If we erased from an internal node, advance the iterator.
  2550. if (internal_delete)
  2551. {
  2552. ++res;
  2553. }
  2554. return res;
  2555. }
// Walks from `iter`'s node back up toward the root, merging/rebalancing any
// node that fell below kMinNodeValues, shrinking the tree height at the root
// if needed. Returns the (possibly relocated) iterator for the position that
// followed the deletion.
  2556. template<typename P>
  2557. auto btree<P>::rebalance_after_delete(iterator iter) -> iterator
  2558. {
  2559. // Merge/rebalance as we walk back up the tree.
  2560. iterator res(iter);
  2561. bool first_iteration = true;
  2562. for (;;)
  2563. {
  2564. if (iter.node_ == root())
  2565. {
  2566. try_shrink();
  2567. if (empty())
  2568. {
  2569. return end();
  2570. }
  2571. break;
  2572. }
// Node is sufficiently full: no more fixups needed above this point.
  2573. if (iter.node_->count() >= kMinNodeValues)
  2574. {
  2575. break;
  2576. }
  2577. bool merged = try_merge_or_rebalance(&iter);
  2578. // On the first iteration, we should update `res` with `iter` because `res`
  2579. // may have been invalidated.
  2580. if (first_iteration)
  2581. {
  2582. res = iter;
  2583. first_iteration = false;
  2584. }
  2585. if (!merged)
  2586. {
  2587. break;
  2588. }
// A merge removed a value from the parent; continue fixups one level up.
  2589. iter.position_ = iter.node_->position();
  2590. iter.node_ = iter.node_->parent();
  2591. }
  2592. res.update_generation();
  2593. // Adjust our return value. If we're pointing at the end of a node, advance
  2594. // the iterator.
  2595. if (res.position_ == res.node_->finish())
  2596. {
  2597. res.position_ = res.node_->finish() - 1;
  2598. ++res;
  2599. }
  2600. return res;
  2601. }
// Erases the range [begin, end). Returns the number of elements erased and
// an iterator just past the last erased element. Fast paths: erasing the
// entire tree (clear) and a range contained in a single node (bulk remove).
  2602. template<typename P>
  2603. auto btree<P>::erase_range(iterator begin, iterator end)
  2604. -> std::pair<size_type, iterator>
  2605. {
  2606. difference_type count = std::distance(begin, end);
  2607. assert(count >= 0);
  2608. if (count == 0)
  2609. {
  2610. return {0, begin};
  2611. }
  2612. if (static_cast<size_type>(count) == size_)
  2613. {
  2614. clear();
  2615. return {count, this->end()};
  2616. }
  2617. if (begin.node_ == end.node_)
  2618. {
  2619. assert(end.position_ > begin.position_);
  2620. begin.node_->remove_values(begin.position_, end.position_ - begin.position_, mutable_allocator());
  2621. size_ -= count;
  2622. return {count, rebalance_after_delete(begin)};
  2623. }
// General case: repeatedly bulk-erase the tail of the current leaf, or
// fall back to single-element erase() when positioned on an internal node.
  2624. const size_type target_size = size_ - count;
  2625. while (size_ > target_size)
  2626. {
  2627. if (begin.node_->is_leaf())
  2628. {
  2629. const size_type remaining_to_erase = size_ - target_size;
  2630. const size_type remaining_in_node =
  2631. begin.node_->finish() - begin.position_;
  2632. const size_type to_erase =
  2633. (std::min)(remaining_to_erase, remaining_in_node);
  2634. begin.node_->remove_values(begin.position_, to_erase, mutable_allocator());
  2635. size_ -= to_erase;
  2636. begin = rebalance_after_delete(begin);
  2637. }
  2638. else
  2639. {
  2640. begin = erase(begin);
  2641. }
  2642. }
  2643. begin.update_generation();
  2644. return {count, begin};
  2645. }
  2646. template<typename P>
  2647. void btree<P>::clear()
  2648. {
  2649. if (!empty())
  2650. {
  2651. node_type::clear_and_delete(root(), mutable_allocator());
  2652. }
  2653. mutable_root() = mutable_rightmost() = EmptyNode();
  2654. size_ = 0;
  2655. }
// Swaps the contents of two trees. When POCS allows propagation the whole
// rightmost_ tuple (which bundles the allocator and comparator) is swapped;
// otherwise the allocators must already be equal and only value/comparator
// state is exchanged.
  2656. template<typename P>
  2657. void btree<P>::swap(btree& other)
  2658. {
  2659. using std::swap;
  2660. if (absl::allocator_traits<
  2661. allocator_type>::propagate_on_container_swap::value)
  2662. {
  2663. // Note: `rightmost_` also contains the allocator and the key comparator.
  2664. swap(rightmost_, other.rightmost_);
  2665. }
  2666. else
  2667. {
  2668. // It's undefined behavior if the allocators are unequal here.
  2669. assert(allocator() == other.allocator());
  2670. swap(mutable_rightmost(), other.mutable_rightmost());
  2671. swap(*mutable_key_comp(), *other.mutable_key_comp());
  2672. }
  2673. swap(mutable_root(), other.mutable_root());
  2674. swap(size_, other.size_);
  2675. }
// Debug validation of the tree's structural invariants. Every check is an
// assert, so this whole function is a no-op when NDEBUG is defined.
  2676. template<typename P>
  2677. void btree<P>::verify() const
  2678. {
  2679. assert(root() != nullptr);
  2680. assert(leftmost() != nullptr);
  2681. assert(rightmost() != nullptr);
  2682. assert(empty() || size() == internal_verify(root(), nullptr, nullptr));
// leftmost/rightmost caches must match what iteration from the root finds.
  2683. assert(leftmost() == (++const_iterator(root(), -1)).node_);
  2684. assert(rightmost() == (--const_iterator(root(), root()->finish())).node_);
  2685. assert(leftmost()->is_leaf());
  2686. assert(rightmost()->is_leaf());
  2687. }
// Makes room for an insertion at *iter when its node is full. First tries to
// shift values into a non-full left or right sibling (adjusting the
// insertion position, possibly moving *iter into the sibling). If neither
// sibling has room, splits the node — recursing up to make parent room, and
// growing a new root when the current root itself is full.
  2688. template<typename P>
  2689. void btree<P>::rebalance_or_split(iterator* iter)
  2690. {
  2691. node_type*& node = iter->node_;
  2692. int& insert_position = iter->position_;
  2693. assert(node->count() == node->max_count());
  2694. assert(kNodeSlots == node->max_count());
  2695. // First try to make room on the node by rebalancing.
  2696. node_type* parent = node->parent();
  2697. if (node != root())
  2698. {
  2699. if (node->position() > parent->start())
  2700. {
  2701. // Try rebalancing with our left sibling.
  2702. node_type* left = parent->child(node->position() - 1);
  2703. assert(left->max_count() == kNodeSlots);
  2704. if (left->count() < kNodeSlots)
  2705. {
  2706. // We bias rebalancing based on the position being inserted. If we're
  2707. // inserting at the end of the right node then we bias rebalancing to
  2708. // fill up the left node.
  2709. int to_move = (kNodeSlots - left->count()) /
  2710. (1 + (insert_position < static_cast<int>(kNodeSlots)));
  2711. to_move = (std::max)(1, to_move);
  2712. if (insert_position - to_move >= node->start() ||
  2713. left->count() + to_move < static_cast<int>(kNodeSlots))
  2714. {
  2715. left->rebalance_right_to_left(to_move, node, mutable_allocator());
  2716. assert(node->max_count() - node->count() == to_move);
  2717. insert_position = insert_position - to_move;
// The insertion position slid off the front of this node: it now lands
// inside the left sibling.
  2718. if (insert_position < node->start())
  2719. {
  2720. insert_position = insert_position + left->count() + 1;
  2721. node = left;
  2722. }
  2723. assert(node->count() < node->max_count());
  2724. return;
  2725. }
  2726. }
  2727. }
  2728. if (node->position() < parent->finish())
  2729. {
  2730. // Try rebalancing with our right sibling.
  2731. node_type* right = parent->child(node->position() + 1);
  2732. assert(right->max_count() == kNodeSlots);
  2733. if (right->count() < kNodeSlots)
  2734. {
  2735. // We bias rebalancing based on the position being inserted. If we're
  2736. // inserting at the beginning of the left node then we bias rebalancing
  2737. // to fill up the right node.
  2738. int to_move = (static_cast<int>(kNodeSlots) - right->count()) /
  2739. (1 + (insert_position > node->start()));
  2740. to_move = (std::max)(1, to_move);
  2741. if (insert_position <= node->finish() - to_move ||
  2742. right->count() + to_move < static_cast<int>(kNodeSlots))
  2743. {
  2744. node->rebalance_left_to_right(to_move, right, mutable_allocator());
// The insertion position slid past the end of this node: it now lands
// inside the right sibling.
  2745. if (insert_position > node->finish())
  2746. {
  2747. insert_position = insert_position - node->count() - 1;
  2748. node = right;
  2749. }
  2750. assert(node->count() < node->max_count());
  2751. return;
  2752. }
  2753. }
  2754. }
  2755. // Rebalancing failed, make sure there is room on the parent node for a new
  2756. // value.
  2757. assert(parent->max_count() == kNodeSlots);
  2758. if (parent->count() == kNodeSlots)
  2759. {
  2760. iterator parent_iter(node->parent(), node->position());
  2761. rebalance_or_split(&parent_iter);
  2762. }
  2763. }
  2764. else
  2765. {
  2766. // Rebalancing not possible because this is the root node.
  2767. // Create a new root node and set the current root node as the child of the
  2768. // new root.
  2769. parent = new_internal_node(parent);
  2770. parent->set_generation(root()->generation());
  2771. parent->init_child(parent->start(), root());
  2772. mutable_root() = parent;
  2773. // If the former root was a leaf node, then it's now the rightmost node.
  2774. assert(parent->start_child()->is_internal() || parent->start_child() == rightmost());
  2775. }
  2776. // Split the node.
  2777. node_type* split_node;
  2778. if (node->is_leaf())
  2779. {
  2780. split_node = new_leaf_node(parent);
  2781. node->split(insert_position, split_node, mutable_allocator());
// Splitting the rightmost leaf produces a new rightmost leaf.
  2782. if (rightmost() == node)
  2783. mutable_rightmost() = split_node;
  2784. }
  2785. else
  2786. {
  2787. split_node = new_internal_node(parent);
  2788. node->split(insert_position, split_node, mutable_allocator());
  2789. }
// If the insertion point moved into the new (right) half, retarget *iter.
  2790. if (insert_position > node->finish())
  2791. {
  2792. insert_position = insert_position - node->count() - 1;
  2793. node = split_node;
  2794. }
  2795. }
  2796. template<typename P>
  2797. void btree<P>::merge_nodes(node_type* left, node_type* right)
  2798. {
  2799. left->merge(right, mutable_allocator());
  2800. if (rightmost() == right)
  2801. mutable_rightmost() = left;
  2802. }
// Called when iter->node_ has fallen below the minimum fill after a
// deletion. Tries, in order: merge with the left sibling, merge with the
// right sibling (both return true; a merge removes a parent value so fixups
// must continue upward), or borrow values from a richer sibling (returns
// false — no further fixups needed).
  2803. template<typename P>
  2804. bool btree<P>::try_merge_or_rebalance(iterator* iter)
  2805. {
  2806. node_type* parent = iter->node_->parent();
  2807. if (iter->node_->position() > parent->start())
  2808. {
  2809. // Try merging with our left sibling.
  2810. node_type* left = parent->child(iter->node_->position() - 1);
  2811. assert(left->max_count() == kNodeSlots);
  2812. if (1U + left->count() + iter->node_->count() <= kNodeSlots)
  2813. {
// Account for left's values plus the delimiter pulled down from the parent.
  2814. iter->position_ += 1 + left->count();
  2815. merge_nodes(left, iter->node_);
  2816. iter->node_ = left;
  2817. return true;
  2818. }
  2819. }
  2820. if (iter->node_->position() < parent->finish())
  2821. {
  2822. // Try merging with our right sibling.
  2823. node_type* right = parent->child(iter->node_->position() + 1);
  2824. assert(right->max_count() == kNodeSlots);
  2825. if (1U + iter->node_->count() + right->count() <= kNodeSlots)
  2826. {
  2827. merge_nodes(iter->node_, right);
  2828. return true;
  2829. }
  2830. // Try rebalancing with our right sibling. We don't perform rebalancing if
  2831. // we deleted the first element from iter->node_ and the node is not
  2832. // empty. This is a small optimization for the common pattern of deleting
  2833. // from the front of the tree.
  2834. if (right->count() > kMinNodeValues &&
  2835. (iter->node_->count() == 0 || iter->position_ > iter->node_->start()))
  2836. {
  2837. int to_move = (right->count() - iter->node_->count()) / 2;
  2838. to_move = (std::min)(to_move, right->count() - 1);
  2839. iter->node_->rebalance_right_to_left(to_move, right, mutable_allocator());
  2840. return false;
  2841. }
  2842. }
  2843. if (iter->node_->position() > parent->start())
  2844. {
  2845. // Try rebalancing with our left sibling. We don't perform rebalancing if
  2846. // we deleted the last element from iter->node_ and the node is not
  2847. // empty. This is a small optimization for the common pattern of deleting
  2848. // from the back of the tree.
  2849. node_type* left = parent->child(iter->node_->position() - 1);
  2850. if (left->count() > kMinNodeValues &&
  2851. (iter->node_->count() == 0 ||
  2852. iter->position_ < iter->node_->finish()))
  2853. {
  2854. int to_move = (left->count() - iter->node_->count()) / 2;
  2855. to_move = (std::min)(to_move, left->count() - 1);
  2856. left->rebalance_left_to_right(to_move, iter->node_, mutable_allocator());
  2857. iter->position_ += to_move;
  2858. return false;
  2859. }
  2860. }
  2861. return false;
  2862. }
  2863. template<typename P>
  2864. void btree<P>::try_shrink()
  2865. {
  2866. node_type* orig_root = root();
  2867. if (orig_root->count() > 0)
  2868. {
  2869. return;
  2870. }
  2871. // Deleted the last item on the root node, shrink the height of the tree.
  2872. if (orig_root->is_leaf())
  2873. {
  2874. assert(size() == 0);
  2875. mutable_root() = mutable_rightmost() = EmptyNode();
  2876. }
  2877. else
  2878. {
  2879. node_type* child = orig_root->start_child();
  2880. child->make_root();
  2881. mutable_root() = child;
  2882. }
  2883. node_type::clear_and_delete(orig_root, mutable_allocator());
  2884. }
// Normalizes an iterator that may point at finish() of its node: climbs to
// the nearest ancestor actually holding the next value. If no such ancestor
// exists (the iterator was past the last element), node_ becomes null —
// callers map that to end().
  2885. template<typename P>
  2886. template<typename IterType>
  2887. inline IterType btree<P>::internal_last(IterType iter)
  2888. {
  2889. assert(iter.node_ != nullptr);
  2890. while (iter.position_ == iter.node_->finish())
  2891. {
  2892. iter.position_ = iter.node_->position();
  2893. iter.node_ = iter.node_->parent();
// A leaf parent means we climbed past the root: no next value exists.
  2894. if (iter.node_->is_leaf())
  2895. {
  2896. iter.node_ = nullptr;
  2897. break;
  2898. }
  2899. }
  2900. iter.update_generation();
  2901. return iter;
  2902. }
// Inserts a value constructed from `args` at position `iter`, making room
// first if the target leaf is full: either grows an undersized root leaf in
// place, or rebalances/splits via rebalance_or_split(). Returns an iterator
// to the inserted value.
  2903. template<typename P>
  2904. template<typename... Args>
  2905. inline auto btree<P>::internal_emplace(iterator iter, Args&&... args)
  2906. -> iterator
  2907. {
  2908. if (iter.node_->is_internal())
  2909. {
  2910. // We can't insert on an internal node. Instead, we'll insert after the
  2911. // previous value which is guaranteed to be on a leaf node.
  2912. --iter;
  2913. ++iter.position_;
  2914. }
  2915. const field_type max_count = iter.node_->max_count();
  2916. allocator_type* alloc = mutable_allocator();
  2917. if (iter.node_->count() == max_count)
  2918. {
  2919. // Make room in the leaf for the new item.
  2920. if (max_count < kNodeSlots)
  2921. {
  2922. // Insertion into the root where the root is smaller than the full node
  2923. // size. Simply grow the size of the root node.
  2924. assert(iter.node_ == root());
  2925. iter.node_ =
// Double the capacity (capped at kNodeSlots) to amortize regrowth.
  2926. new_leaf_root_node((std::min<int>)(kNodeSlots, 2 * max_count));
  2927. // Transfer the values from the old root to the new root.
  2928. node_type* old_root = root();
  2929. node_type* new_root = iter.node_;
  2930. new_root->transfer_n(old_root->count(), new_root->start(), old_root->start(), old_root, alloc);
  2931. new_root->set_finish(old_root->finish());
  2932. old_root->set_finish(old_root->start());
  2933. new_root->set_generation(old_root->generation());
  2934. node_type::clear_and_delete(old_root, alloc);
  2935. mutable_root() = mutable_rightmost() = new_root;
  2936. }
  2937. else
  2938. {
  2939. rebalance_or_split(&iter);
  2940. }
  2941. }
  2942. iter.node_->emplace_value(iter.position_, alloc, std::forward<Args>(args)...);
  2943. ++size_;
  2944. iter.update_generation();
  2945. return iter;
  2946. }
// Descends from the root toward `key`, taking the lower-bound slot at each
// level. Returns kEq as soon as a node's search reports an exact match;
// otherwise descends all the way to a leaf and returns kNe (which, as noted
// below, may still be an equivalent key in the non-key-compare-to case).
  2947. template<typename P>
  2948. template<typename K>
  2949. inline auto btree<P>::internal_locate(const K& key) const
  2950. -> SearchResult<iterator, is_key_compare_to::value>
  2951. {
  2952. iterator iter(const_cast<node_type*>(root()));
  2953. for (;;)
  2954. {
  2955. SearchResult<int, is_key_compare_to::value> res =
  2956. iter.node_->lower_bound(key, key_comp());
  2957. iter.position_ = res.value;
  2958. if (res.IsEq())
  2959. {
  2960. return {iter, MatchKind::kEq};
  2961. }
  2962. // Note: in the non-key-compare-to case, we don't need to walk all the way
  2963. // down the tree if the keys are equal, but determining equality would
  2964. // require doing an extra comparison on each node on the way down, and we
  2965. // will need to go all the way to the leaf node in the expected case.
  2966. if (iter.node_->is_leaf())
  2967. {
  2968. break;
  2969. }
  2970. iter.node_ = iter.node_->child(iter.position_);
  2971. }
  2972. // Note: in the non-key-compare-to case, the key may actually be equivalent
  2973. // here (and the MatchKind::kNe is ignored).
  2974. return {iter, MatchKind::kNe};
  2975. }
  2976. template<typename P>
  2977. template<typename K>
  2978. auto btree<P>::internal_lower_bound(const K& key) const
  2979. -> SearchResult<iterator, is_key_compare_to::value>
  2980. {
  2981. if (!params_type::template can_have_multiple_equivalent_keys<K>())
  2982. {
  2983. SearchResult<iterator, is_key_compare_to::value> ret = internal_locate(key);
  2984. ret.value = internal_last(ret.value);
  2985. return ret;
  2986. }
  2987. iterator iter(const_cast<node_type*>(root()));
  2988. SearchResult<int, is_key_compare_to::value> res;
  2989. bool seen_eq = false;
  2990. for (;;)
  2991. {
  2992. res = iter.node_->lower_bound(key, key_comp());
  2993. iter.position_ = res.value;
  2994. if (iter.node_->is_leaf())
  2995. {
  2996. break;
  2997. }
  2998. seen_eq = seen_eq || res.IsEq();
  2999. iter.node_ = iter.node_->child(iter.position_);
  3000. }
  3001. if (res.IsEq())
  3002. return {iter, MatchKind::kEq};
  3003. return {internal_last(iter), seen_eq ? MatchKind::kEq : MatchKind::kNe};
  3004. }
  3005. template<typename P>
  3006. template<typename K>
  3007. auto btree<P>::internal_upper_bound(const K& key) const -> iterator
  3008. {
  3009. iterator iter(const_cast<node_type*>(root()));
  3010. for (;;)
  3011. {
  3012. iter.position_ = iter.node_->upper_bound(key, key_comp());
  3013. if (iter.node_->is_leaf())
  3014. {
  3015. break;
  3016. }
  3017. iter.node_ = iter.node_->child(iter.position_);
  3018. }
  3019. return internal_last(iter);
  3020. }
  3021. template<typename P>
  3022. template<typename K>
  3023. auto btree<P>::internal_find(const K& key) const -> iterator
  3024. {
  3025. SearchResult<iterator, is_key_compare_to::value> res = internal_locate(key);
  3026. if (res.HasMatch())
  3027. {
  3028. if (res.IsEq())
  3029. {
  3030. return res.value;
  3031. }
  3032. }
  3033. else
  3034. {
  3035. const iterator iter = internal_last(res.value);
  3036. if (iter.node_ != nullptr && !compare_keys(key, iter.key()))
  3037. {
  3038. return iter;
  3039. }
  3040. }
  3041. return {nullptr, 0};
  3042. }
  3043. template<typename P>
  3044. int btree<P>::internal_verify(const node_type* node, const key_type* lo, const key_type* hi) const
  3045. {
  3046. assert(node->count() > 0);
  3047. assert(node->count() <= node->max_count());
  3048. if (lo)
  3049. {
  3050. assert(!compare_keys(node->key(node->start()), *lo));
  3051. }
  3052. if (hi)
  3053. {
  3054. assert(!compare_keys(*hi, node->key(node->finish() - 1)));
  3055. }
  3056. for (int i = node->start() + 1; i < node->finish(); ++i)
  3057. {
  3058. assert(!compare_keys(node->key(i), node->key(i - 1)));
  3059. }
  3060. int count = node->count();
  3061. if (node->is_internal())
  3062. {
  3063. for (int i = node->start(); i <= node->finish(); ++i)
  3064. {
  3065. assert(node->child(i) != nullptr);
  3066. assert(node->child(i)->parent() == node);
  3067. assert(node->child(i)->position() == i);
  3068. count += internal_verify(node->child(i), i == node->start() ? lo : &node->key(i - 1), i == node->finish() ? hi : &node->key(i));
  3069. }
  3070. }
  3071. return count;
  3072. }
// Provides the btree-aware implementation behind the containers' free
// functions (e.g. erase_if), with direct access to container internals.
struct btree_access
{
    // Erases every element for which `pred` returns true and returns the
    // number of elements removed. Elements in the same leaf node are erased
    // in a single batch (destroy + compact via transfer) before rebalancing,
    // which avoids a rebalance per erased element.
    template<typename BtreeContainer, typename Pred>
    static auto erase_if(BtreeContainer& container, Pred pred)
        -> typename BtreeContainer::size_type
    {
        const auto initial_size = container.size();
        auto& tree = container.tree_;
        auto* alloc = tree.mutable_allocator();
        for (auto it = container.begin(); it != container.end();)
        {
            if (!pred(*it))
            {
                ++it;
                continue;
            }
            auto* node = it.node_;
            if (node->is_internal())
            {
                // Handle internal nodes normally.
                it = container.erase(it);
                continue;
            }
            // If this is a leaf node, then we do all the erases from this node
            // at once before doing rebalancing.
            // The current position to transfer slots to.
            int to_pos = it.position_;
            node->value_destroy(it.position_, alloc);
            while (++it.position_ < node->finish())
            {
                // Keep the iterator's generation in sync so dereferencing it
                // for the predicate below does not trip generation checks.
                it.update_generation();
                if (pred(*it))
                {
                    node->value_destroy(it.position_, alloc);
                }
                else
                {
                    // Survivor: compact it leftward into the next free slot.
                    node->transfer(node->slot(to_pos++), node->slot(it.position_), alloc);
                }
            }
            const int num_deleted = node->finish() - to_pos;
            tree.size_ -= num_deleted;
            node->set_finish(to_pos);
            it.position_ = to_pos;
            // The batch of erases may have left the leaf under-full; let the
            // tree rebalance and give us back a valid iterator to continue.
            it = tree.rebalance_after_delete(it);
        }
        return initial_size - container.size();
    }
};
  3122. #undef ABSL_BTREE_ENABLE_GENERATIONS
  3123. } // namespace container_internal
  3124. ABSL_NAMESPACE_END
  3125. } // namespace absl
  3126. #endif // ABSL_CONTAINER_INTERNAL_BTREE_H_