You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number, can include dashes ('-') and can be up to 35 characters long.

repeated_field.h 55 kB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
7127812791280128112821283128412851286128712881289129012911292129312941295129612971298129913001301130213031304130513061307130813091310131113121313131413151316131713181319132013211322132313241325132613271328132913301331133213331334133513361337133813391340134113421343134413451346134713481349135013511352135313541355135613571358135913601361136213631364136513661367136813691370137113721373137413751376137713781379138013811382138313841385138613871388138913901391139213931394139513961397139813991400140114021403140414051406140714081409141014111412141314141415
  1. // Protocol Buffers - Google's data interchange format
  2. // Copyright 2008 Google Inc. All rights reserved.
  3. // https://developers.google.com/protocol-buffers/
  4. //
  5. // Redistribution and use in source and binary forms, with or without
  6. // modification, are permitted provided that the following conditions are
  7. // met:
  8. //
  9. // * Redistributions of source code must retain the above copyright
  10. // notice, this list of conditions and the following disclaimer.
  11. // * Redistributions in binary form must reproduce the above
  12. // copyright notice, this list of conditions and the following disclaimer
  13. // in the documentation and/or other materials provided with the
  14. // distribution.
  15. // * Neither the name of Google Inc. nor the names of its
  16. // contributors may be used to endorse or promote products derived from
  17. // this software without specific prior written permission.
  18. //
  19. // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
  20. // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
  21. // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
  22. // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
  23. // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
  24. // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
  25. // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
  26. // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
  27. // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
  28. // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
  29. // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  30. // Author: kenton@google.com (Kenton Varda)
  31. // Based on original Protocol Buffers design by
  32. // Sanjay Ghemawat, Jeff Dean, and others.
  33. //
  34. // RepeatedField and RepeatedPtrField are used by generated protocol message
  35. // classes to manipulate repeated fields. These classes are very similar to
  36. // STL's vector, but include a number of optimizations found to be useful
  37. // specifically in the case of Protocol Buffers. RepeatedPtrField is
  38. // particularly different from STL vector as it manages ownership of the
  39. // pointers that it contains.
  40. //
  41. // This header covers RepeatedField.
  42. #ifndef GOOGLE_PROTOBUF_REPEATED_FIELD_H__
  43. #define GOOGLE_PROTOBUF_REPEATED_FIELD_H__
  44. #include <algorithm>
  45. #include <iterator>
  46. #include <limits>
  47. #include <string>
  48. #include <type_traits>
  49. #include <utility>
  50. #include <google/protobuf/stubs/logging.h>
  51. #include <google/protobuf/stubs/common.h>
  52. #include <google/protobuf/arena.h>
  53. #include <google/protobuf/port.h>
  54. #include <google/protobuf/message_lite.h>
  55. #include <google/protobuf/repeated_ptr_field.h>
  56. // Must be included last.
  57. #include <google/protobuf/port_def.inc>
  58. #ifdef SWIG
  59. #error "You cannot SWIG proto headers"
  60. #endif
  61. namespace google
  62. {
  63. namespace protobuf
  64. {
  65. class Message;
  66. namespace internal
  67. {
  68. template<typename T, int kRepHeaderSize>
  69. constexpr int RepeatedFieldLowerClampLimit()
  70. {
  71. // The header is padded to be at least `sizeof(T)` when it would be smaller
  72. // otherwise.
  73. static_assert(sizeof(T) <= kRepHeaderSize, "");
  74. // We want to pad the minimum size to be a power of two bytes, including the
  75. // header.
  76. // The first allocation is kRepHeaderSize bytes worth of elements for a total
  77. // of 2*kRepHeaderSize bytes.
  78. // For an 8-byte header, we allocate 8 bool, 2 ints, or 1 int64.
  79. return kRepHeaderSize / sizeof(T);
  80. }
  81. // kRepeatedFieldUpperClampLimit is the lowest signed integer value that
  82. // overflows when multiplied by 2 (which is undefined behavior). Sizes above
  83. // this will clamp to the maximum int value instead of following exponential
  84. // growth when growing a repeated field.
  85. constexpr int kRepeatedFieldUpperClampLimit =
  86. (std::numeric_limits<int>::max() / 2) + 1;
  87. template<typename Iter>
  88. inline int CalculateReserve(Iter begin, Iter end, std::forward_iterator_tag)
  89. {
  90. return static_cast<int>(std::distance(begin, end));
  91. }
  92. template<typename Iter>
  93. inline int CalculateReserve(Iter /*begin*/, Iter /*end*/, std::input_iterator_tag /*unused*/)
  94. {
  95. return -1;
  96. }
  97. template<typename Iter>
  98. inline int CalculateReserve(Iter begin, Iter end)
  99. {
  100. typedef typename std::iterator_traits<Iter>::iterator_category Category;
  101. return CalculateReserve(begin, end, Category());
  102. }
  103. // Swaps two blocks of memory of size sizeof(T).
  104. template<typename T>
  105. inline void SwapBlock(char* p, char* q)
  106. {
  107. T tmp;
  108. memcpy(&tmp, p, sizeof(T));
  109. memcpy(p, q, sizeof(T));
  110. memcpy(q, &tmp, sizeof(T));
  111. }
// Swaps two blocks of memory of size kSize:
// template <int kSize> void memswap(char* p, char* q);
// Base case of the compile-time recursion: nothing left to swap.
template<int kSize>
inline typename std::enable_if<(kSize == 0), void>::type memswap(char*, char*)
{
}
// Defines a memswap overload enabled for kSize in [sizeof(reg_type), max_size):
// swap one reg_type-sized chunk via SwapBlock, then recurse (unrolled at
// compile time) on the remaining kSize - sizeof(reg_type) bytes. The
// instantiations below tile the ranges so exactly one overload is enabled for
// any given kSize, picking the widest register type that fits.
#define PROTO_MEMSWAP_DEF_SIZE(reg_type, max_size) \
template<int kSize> \
typename std::enable_if<(kSize >= sizeof(reg_type) && kSize < (max_size)), void>::type \
memswap(char* p, char* q) \
{ \
SwapBlock<reg_type>(p, q); \
memswap<kSize - sizeof(reg_type)>(p + sizeof(reg_type), q + sizeof(reg_type)); \
}
PROTO_MEMSWAP_DEF_SIZE(uint8_t, 2)
PROTO_MEMSWAP_DEF_SIZE(uint16_t, 4)
PROTO_MEMSWAP_DEF_SIZE(uint32_t, 8)
// When the compiler provides a 128-bit integer, use it for 8..15-byte chunks
// and let __uint128_t cover everything larger.
#ifdef __SIZEOF_INT128__
PROTO_MEMSWAP_DEF_SIZE(uint64_t, 16)
PROTO_MEMSWAP_DEF_SIZE(__uint128_t, (1u << 31))
#else
PROTO_MEMSWAP_DEF_SIZE(uint64_t, (1u << 31))
#endif
#undef PROTO_MEMSWAP_DEF_SIZE
  136. template<typename Element>
  137. class RepeatedIterator;
  138. } // namespace internal
// RepeatedField is used to represent repeated fields of a primitive type (in
// other words, everything except strings and nested Messages). Most users will
// not ever use a RepeatedField directly; they will use the get-by-index,
// set-by-index, and add accessors that are generated for all repeated fields.
template<typename Element>
class RepeatedField final
{
    static_assert(
        alignof(Arena) >= alignof(Element),
        "We only support types that have an alignment smaller than Arena"
    );

public:
    constexpr RepeatedField();
    explicit RepeatedField(Arena* arena);
    RepeatedField(const RepeatedField& other);
    template<typename Iter, typename = typename std::enable_if<std::is_constructible<Element, decltype(*std::declval<Iter>())>::value>::type>
    RepeatedField(Iter begin, Iter end);
    ~RepeatedField();
    RepeatedField& operator=(const RepeatedField& other);
    RepeatedField(RepeatedField&& other) noexcept;
    RepeatedField& operator=(RepeatedField&& other) noexcept;
    bool empty() const;
    int size() const;
    const Element& Get(int index) const;
    Element* Mutable(int index);
    // operator[] mirrors Get()/Mutable() (debug-checked bounds only).
    const Element& operator[](int index) const
    {
        return Get(index);
    }
    Element& operator[](int index)
    {
        return *Mutable(index);
    }
    // at() mirrors Get()/Mutable() but CHECKs bounds in all build modes.
    const Element& at(int index) const;
    Element& at(int index);
    void Set(int index, const Element& value);
    void Add(const Element& value);
    // Appends a new element and returns a pointer to it.
    // The new element is uninitialized if |Element| is a POD type.
    Element* Add();
    // Appends elements in the range [begin, end) after reserving
    // the appropriate number of elements.
    template<typename Iter>
    void Add(Iter begin, Iter end);
    // Removes the last element in the array.
    void RemoveLast();
    // Extracts elements with indices in "[start .. start+num-1]".
    // Copies them into "elements[0 .. num-1]" if "elements" is not nullptr.
    // Caution: also moves elements with indices [start+num ..].
    // Calling this routine inside a loop can cause quadratic behavior.
    void ExtractSubrange(int start, int num, Element* elements);
    PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear();
    void MergeFrom(const RepeatedField& other);
    PROTOBUF_ATTRIBUTE_REINITIALIZES void CopyFrom(const RepeatedField& other);
    // Replaces the contents with RepeatedField(begin, end).
    template<typename Iter>
    PROTOBUF_ATTRIBUTE_REINITIALIZES void Assign(Iter begin, Iter end);
    // Reserves space to expand the field to at least the given size. If the
    // array is grown, it will always be at least doubled in size.
    void Reserve(int new_size);
    // Resizes the RepeatedField to a new, smaller size. This is O(1).
    void Truncate(int new_size);
    void AddAlreadyReserved(const Element& value);
    // Appends a new element and return a pointer to it.
    // The new element is uninitialized if |Element| is a POD type.
    // Should be called only if Capacity() > Size().
    Element* AddAlreadyReserved();
    Element* AddNAlreadyReserved(int elements);
    int Capacity() const;
    // Like STL resize. Uses value to fill appended elements.
    // Like Truncate() if new_size <= size(), otherwise this is
    // O(new_size - size()).
    void Resize(int new_size, const Element& value);
    // Gets the underlying array. This pointer is possibly invalidated by
    // any add or remove operation.
    Element* mutable_data();
    const Element* data() const;
    // Swaps entire contents with "other". If they are separate arenas then,
    // copies data between each other.
    void Swap(RepeatedField* other);
    // Swaps entire contents with "other". Should be called only if the caller can
    // guarantee that both repeated fields are on the same arena or are on the
    // heap. Swapping between different arenas is disallowed and caught by a
    // GOOGLE_DCHECK (see API docs for details).
    void UnsafeArenaSwap(RepeatedField* other);
    // Swaps two elements.
    void SwapElements(int index1, int index2);
    // STL-like iterator support
    typedef internal::RepeatedIterator<Element> iterator;
    typedef internal::RepeatedIterator<const Element> const_iterator;
    typedef Element value_type;
    typedef value_type& reference;
    typedef const value_type& const_reference;
    typedef value_type* pointer;
    typedef const value_type* const_pointer;
    typedef int size_type;
    typedef ptrdiff_t difference_type;
    iterator begin();
    const_iterator begin() const;
    const_iterator cbegin() const;
    iterator end();
    const_iterator end() const;
    const_iterator cend() const;
    // Reverse iterator support
    typedef std::reverse_iterator<const_iterator> const_reverse_iterator;
    typedef std::reverse_iterator<iterator> reverse_iterator;
    reverse_iterator rbegin()
    {
        return reverse_iterator(end());
    }
    const_reverse_iterator rbegin() const
    {
        return const_reverse_iterator(end());
    }
    reverse_iterator rend()
    {
        return reverse_iterator(begin());
    }
    const_reverse_iterator rend() const
    {
        return const_reverse_iterator(begin());
    }
    // Returns the number of bytes used by the repeated field, excluding
    // sizeof(*this)
    size_t SpaceUsedExcludingSelfLong() const;
    int SpaceUsedExcludingSelf() const
    {
        return internal::ToIntSize(SpaceUsedExcludingSelfLong());
    }
    // Removes the element referenced by position.
    //
    // Returns an iterator to the element immediately following the removed
    // element.
    //
    // Invalidates all iterators at or after the removed element, including end().
    iterator erase(const_iterator position);
    // Removes the elements in the range [first, last).
    //
    // Returns an iterator to the element immediately following the removed range.
    //
    // Invalidates all iterators at or after the removed range, including end().
    iterator erase(const_iterator first, const_iterator last);
    // Gets the Arena on which this RepeatedField stores its elements.
    inline Arena* GetArena() const
    {
        return GetOwningArena();
    }
    // For internal use only.
    //
    // This is public due to it being called by generated code.
    inline void InternalSwap(RepeatedField* other);

private:
    template<typename T>
    friend class Arena::InternalHelper;
    // Gets the Arena on which this RepeatedField stores its elements.
    // Relies on the arena_or_elements_ invariant documented below: while
    // total_size_ == 0 the pointer IS the arena; otherwise the arena is read
    // from the Rep header.
    inline Arena* GetOwningArena() const
    {
        return (total_size_ == 0) ? static_cast<Arena*>(arena_or_elements_) : rep()->arena;
    }
    // A default-constructed field owns no storage at all.
    static constexpr int kInitialSize = 0;
    // A note on the representation here (see also comment below for
    // RepeatedPtrFieldBase's struct Rep):
    //
    // We maintain the same sizeof(RepeatedField) as before we added arena support
    // so that we do not degrade performance by bloating memory usage. Directly
    // adding an arena_ element to RepeatedField is quite costly. By using
    // indirection in this way, we keep the same size when the RepeatedField is
    // empty (common case), and add only an 8-byte header to the elements array
    // when non-empty. We make sure to place the size fields directly in the
    // RepeatedField class to avoid costly cache misses due to the indirection.
    int current_size_;
    int total_size_;
    // Pad the Rep after arena allow for power-of-two byte sizes when
    // sizeof(Element) > sizeof(Arena*). eg for 16-byte objects.
    static PROTOBUF_CONSTEXPR const size_t kRepHeaderSize =
        sizeof(Arena*) < sizeof(Element) ? sizeof(Element) : sizeof(Arena*);
    // Heap/arena block layout: a kRepHeaderSize header holding the arena
    // pointer, immediately followed by the element array.
    struct Rep
    {
        Arena* arena;
        Element* elements()
        {
            return reinterpret_cast<Element*>(reinterpret_cast<char*>(this) + kRepHeaderSize);
        }
    };
    // If total_size_ == 0 this points to an Arena otherwise it points to the
    // elements member of a Rep struct. Using this invariant allows the storage of
    // the arena pointer without an extra allocation in the constructor.
    void* arena_or_elements_;
    // Returns a pointer to elements array.
    // pre-condition: the array must have been allocated.
    Element* elements() const
    {
        GOOGLE_DCHECK_GT(total_size_, 0);
        // Because of above pre-condition this cast is safe.
        return unsafe_elements();
    }
    // Returns a pointer to elements array if it exists; otherwise either null or
    // an invalid pointer is returned. This only happens for empty repeated
    // fields, where you can't dereference this pointer anyway (it's empty).
    Element* unsafe_elements() const
    {
        return static_cast<Element*>(arena_or_elements_);
    }
    // Returns a pointer to the Rep struct.
    // pre-condition: the Rep must have been allocated, ie elements() is safe.
    Rep* rep() const
    {
        return reinterpret_cast<Rep*>(reinterpret_cast<char*>(elements()) - kRepHeaderSize);
    }
    friend class Arena;
    typedef void InternalArenaConstructable_;
    // Moves the contents of |from| into |to|, possibly clobbering |from| in the
    // process. For primitive types this is just a memcpy(), but it could be
    // specialized for non-primitive types to, say, swap each element instead.
    void MoveArray(Element* to, Element* from, int size);
    // Copies the elements of |from| into |to|.
    void CopyArray(Element* to, const Element* from, int size);
    // Internal helper to delete all elements and deallocate the storage.
    void InternalDeallocate(Rep* rep, int size, bool in_destructor)
    {
        if (rep != nullptr)
        {
            Element* e = &rep->elements()[0];
            // Non-trivial element types need their destructors run explicitly.
            if (!std::is_trivial<Element>::value)
            {
                Element* limit = &rep->elements()[size];
                for (; e < limit; e++)
                {
                    e->~Element();
                }
            }
            const size_t bytes = size * sizeof(*e) + kRepHeaderSize;
            if (rep->arena == nullptr)
            {
                internal::SizedDelete(rep, bytes);
            }
            else if (!in_destructor)
            {
                // If we are in the destructor, we might be being destroyed as part of
                // the arena teardown. We can't try and return blocks to the arena then.
                rep->arena->ReturnArrayMemory(rep, bytes);
            }
        }
    }
    // This class is a performance wrapper around RepeatedField::Add(const T&)
    // function. In general unless a RepeatedField is a local stack variable LLVM
    // has a hard time optimizing Add. The machine code tends to be
    // loop:
    // mov %size, dword ptr [%repeated_field] // load
    // cmp %size, dword ptr [%repeated_field + 4]
    // jae fallback
    // mov %buffer, qword ptr [%repeated_field + 8]
    // mov dword [%buffer + %size * 4], %value
    // inc %size // increment
    // mov dword ptr [%repeated_field], %size // store
    // jmp loop
    //
    // This puts a load/store in each iteration of the important loop variable
    // size. It's a pretty bad compile that happens even in simple cases, but
    // largely the presence of the fallback path disturbs the compilers mem-to-reg
    // analysis.
    //
    // This class takes ownership of a repeated field for the duration of its
    // lifetime. The repeated field should not be accessed during this time, ie.
    // only access through this class is allowed. This class should always be a
    // function local stack variable. Intended use
    //
    // void AddSequence(const int* begin, const int* end, RepeatedField<int>* out)
    // {
    // RepeatedFieldAdder<int> adder(out); // Take ownership of out
    // for (auto it = begin; it != end; ++it) {
    // adder.Add(*it);
    // }
    // }
    //
    // Typically, due to the fact that adder is a local stack variable, the
    // compiler will be successful in mem-to-reg transformation and the machine
    // code will be loop: cmp %size, %capacity jae fallback mov dword ptr [%buffer
    // + %size * 4], %val inc %size jmp loop
    //
    // The first version executes at 7 cycles per iteration while the second
    // version executes at only 1 or 2 cycles.
    template<int = 0, bool = std::is_trivial<Element>::value>
    class FastAdderImpl
    {
    public:
        // Caches size, capacity, and buffer pointer in locals so the hot Add()
        // path touches no RepeatedField memory.
        explicit FastAdderImpl(RepeatedField* rf) :
            repeated_field_(rf)
        {
            index_ = repeated_field_->current_size_;
            capacity_ = repeated_field_->total_size_;
            buffer_ = repeated_field_->unsafe_elements();
        }
        // Writes the cached size back to the field on scope exit.
        ~FastAdderImpl()
        {
            repeated_field_->current_size_ = index_;
        }
        void Add(Element val)
        {
            if (index_ == capacity_)
            {
                // Publish the current size, grow, then re-cache capacity and the
                // (possibly reallocated) buffer pointer.
                repeated_field_->current_size_ = index_;
                repeated_field_->Reserve(index_ + 1);
                capacity_ = repeated_field_->total_size_;
                buffer_ = repeated_field_->unsafe_elements();
            }
            buffer_[index_++] = val;
        }

    private:
        RepeatedField* repeated_field_;
        int index_;
        int capacity_;
        Element* buffer_;
        GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(FastAdderImpl);
    };
    // FastAdder is a wrapper for adding fields. The specialization above handles
    // POD types more efficiently than RepeatedField.
    template<int I>
    class FastAdderImpl<I, false>
    {
    public:
        explicit FastAdderImpl(RepeatedField* rf) :
            repeated_field_(rf)
        {
        }
        // Non-trivial types just forward to the regular Add().
        void Add(const Element& val)
        {
            repeated_field_->Add(val);
        }

    private:
        RepeatedField* repeated_field_;
        GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(FastAdderImpl);
    };
    using FastAdder = FastAdderImpl<>;
    friend class TestRepeatedFieldHelper;
    friend class ::google::protobuf::internal::ParseContext;
};
namespace internal
{
// This is a helper template to copy an array of elements efficiently when they
// have a trivial copy constructor, and correctly otherwise. This really
// shouldn't be necessary, but our compiler doesn't optimize std::copy very
// effectively.
template<typename Element, bool HasTrivialCopy = std::is_trivial<Element>::value>
struct ElementCopier
{
    // Copies array_size elements from `from` into `to`.
    // NOTE(review): declaration only — the definition (and the HasTrivialCopy
    // specialization) presumably lives further down this file; confirm there.
    void operator()(Element* to, const Element* from, int array_size);
};
} // namespace internal
  488. // implementation ====================================================
// Default constructor: an empty, heap-backed field. No storage is allocated;
// while total_size_ == 0, arena_or_elements_ (here nullptr) is interpreted as
// the owning Arena*.
template<typename Element>
constexpr RepeatedField<Element>::RepeatedField() :
    current_size_(0),
    total_size_(0),
    arena_or_elements_(nullptr)
{
}
// Arena constructor: stores the owning arena in arena_or_elements_, which is
// read back as an Arena* for as long as total_size_ == 0 (see the invariant
// documented on arena_or_elements_ in the class body).
template<typename Element>
inline RepeatedField<Element>::RepeatedField(Arena* arena) :
    current_size_(0),
    total_size_(0),
    arena_or_elements_(arena)
{
}
  503. template<typename Element>
  504. inline RepeatedField<Element>::RepeatedField(const RepeatedField& other) :
  505. current_size_(0),
  506. total_size_(0),
  507. arena_or_elements_(nullptr)
  508. {
  509. if (other.current_size_ != 0)
  510. {
  511. Reserve(other.size());
  512. AddNAlreadyReserved(other.size());
  513. CopyArray(Mutable(0), &other.Get(0), other.size());
  514. }
  515. }
// Range constructor: builds an empty heap-backed field and appends all of
// [begin, end) by delegating to Add(Iter, Iter).
template<typename Element>
template<typename Iter, typename>
RepeatedField<Element>::RepeatedField(Iter begin, Iter end) :
    current_size_(0),
    total_size_(0),
    arena_or_elements_(nullptr)
{
    Add(begin, end);
}
// Destructor: destroys elements and releases the backing block, if any.
template<typename Element>
RepeatedField<Element>::~RepeatedField()
{
#ifndef NDEBUG
    // Try to trigger segfault / asan failure in non-opt builds if arena_
    // lifetime has ended before the destructor.
    auto arena = GetOwningArena();
    if (arena)
        (void)arena->SpaceAllocated();
#endif
    if (total_size_ > 0)
    {
        // in_destructor=true: blocks must not be returned to an arena that may
        // itself be in teardown (see InternalDeallocate).
        InternalDeallocate(rep(), total_size_, true);
    }
}
  540. template<typename Element>
  541. inline RepeatedField<Element>& RepeatedField<Element>::operator=(
  542. const RepeatedField& other
  543. )
  544. {
  545. if (this != &other)
  546. CopyFrom(other);
  547. return *this;
  548. }
// Move constructor: steals heap storage when possible, copies otherwise.
// The moved-from field is left in a valid state.
template<typename Element>
inline RepeatedField<Element>::RepeatedField(RepeatedField&& other) noexcept
    :
    RepeatedField()
{
#ifdef PROTOBUF_FORCE_COPY_IN_MOVE
    CopyFrom(other);
#else // PROTOBUF_FORCE_COPY_IN_MOVE
    // We don't just call Swap(&other) here because it would perform 3 copies if
    // other is on an arena. This field can't be on an arena because arena
    // construction always uses the Arena* accepting constructor.
    if (other.GetOwningArena())
    {
        // Arena-backed source: its storage cannot be detached, so copy.
        CopyFrom(other);
    }
    else
    {
        // Heap-backed source: O(1) steal of its storage.
        InternalSwap(&other);
    }
#endif // !PROTOBUF_FORCE_COPY_IN_MOVE
}
// Move assignment: swaps storage when both fields share an owner (same arena,
// or both heap), copies otherwise.
template<typename Element>
inline RepeatedField<Element>& RepeatedField<Element>::operator=(
    RepeatedField&& other
) noexcept
{
    // We don't just call Swap(&other) here because it would perform 3 copies if
    // the two fields are on different arenas.
    if (this != &other)
    {
        if (GetOwningArena() != other.GetOwningArena()
#ifdef PROTOBUF_FORCE_COPY_IN_MOVE
            || GetOwningArena() == nullptr
#endif // !PROTOBUF_FORCE_COPY_IN_MOVE
        )
        {
            // Different owners (or forced-copy build): must deep-copy.
            CopyFrom(other);
        }
        else
        {
            // Same owner: O(1) storage swap.
            InternalSwap(&other);
        }
    }
    return *this;
}
  594. template<typename Element>
  595. inline bool RepeatedField<Element>::empty() const
  596. {
  597. return current_size_ == 0;
  598. }
// Number of elements currently stored (distinct from Capacity()).
template<typename Element>
inline int RepeatedField<Element>::size() const
{
    return current_size_;
}
// Number of elements the currently allocated storage can hold; always
// at least size().
template<typename Element>
inline int RepeatedField<Element>::Capacity() const
{
    return total_size_;
}
  609. template<typename Element>
  610. inline void RepeatedField<Element>::AddAlreadyReserved(const Element& value)
  611. {
  612. GOOGLE_DCHECK_LT(current_size_, total_size_);
  613. elements()[current_size_++] = value;
  614. }
  615. template<typename Element>
  616. inline Element* RepeatedField<Element>::AddAlreadyReserved()
  617. {
  618. GOOGLE_DCHECK_LT(current_size_, total_size_);
  619. return &elements()[current_size_++];
  620. }
// Claims `elements` consecutive already-reserved slots and returns a pointer
// to the first of them (debug-checked capacity only).
template<typename Element>
inline Element* RepeatedField<Element>::AddNAlreadyReserved(int elements)
{
    GOOGLE_DCHECK_GE(total_size_ - current_size_, elements)
        << total_size_ << ", " << current_size_;
    // Warning: sometimes people call this when elements == 0 and
    // total_size_ == 0. In this case the return pointer points to a zero size
    // array (n == 0). Hence we can just use unsafe_elements(), because the user
    // cannot dereference the pointer anyway.
    Element* ret = unsafe_elements() + current_size_;
    current_size_ += elements;
    return ret;
}
  634. template<typename Element>
  635. inline void RepeatedField<Element>::Resize(int new_size, const Element& value)
  636. {
  637. GOOGLE_DCHECK_GE(new_size, 0);
  638. if (new_size > current_size_)
  639. {
  640. Reserve(new_size);
  641. std::fill(&elements()[current_size_], &elements()[new_size], value);
  642. }
  643. current_size_ = new_size;
  644. }
// Read-only access to the element at `index`; bounds are checked in debug
// builds only.
template<typename Element>
inline const Element& RepeatedField<Element>::Get(int index) const
{
    GOOGLE_DCHECK_GE(index, 0);
    GOOGLE_DCHECK_LT(index, current_size_);
    return elements()[index];
}
// Like Get(), but the bounds CHECKs fire in all build modes.
template<typename Element>
inline const Element& RepeatedField<Element>::at(int index) const
{
    GOOGLE_CHECK_GE(index, 0);
    GOOGLE_CHECK_LT(index, current_size_);
    return elements()[index];
}
// Mutable counterpart of the const at(); bounds CHECKed in all build modes.
template<typename Element>
inline Element& RepeatedField<Element>::at(int index)
{
    GOOGLE_CHECK_GE(index, 0);
    GOOGLE_CHECK_LT(index, current_size_);
    return elements()[index];
}
// Mutable access to the element at `index`; bounds are checked in debug
// builds only.
template<typename Element>
inline Element* RepeatedField<Element>::Mutable(int index)
{
    GOOGLE_DCHECK_GE(index, 0);
    GOOGLE_DCHECK_LT(index, current_size_);
    return &elements()[index];
}
  673. template<typename Element>
  674. inline void RepeatedField<Element>::Set(int index, const Element& value)
  675. {
  676. GOOGLE_DCHECK_GE(index, 0);
  677. GOOGLE_DCHECK_LT(index, current_size_);
  678. elements()[index] = value;
  679. }
// Appends a copy of `value`, growing the backing array when it is full.
template<typename Element>
inline void RepeatedField<Element>::Add(const Element& value)
{
    uint32_t size = current_size_;
    if (static_cast<int>(size) == total_size_)
    {
        // value could reference an element of the array. Reserving new space will
        // invalidate the reference. So we must make a copy first.
        auto tmp = value;
        Reserve(total_size_ + 1);
        // Move from the local copy into the freshly reserved slot.
        elements()[size] = std::move(tmp);
    }
    else
    {
        elements()[size] = value;
    }
    current_size_ = size + 1;
}
  698. template<typename Element>
  699. inline Element* RepeatedField<Element>::Add()
  700. {
  701. uint32_t size = current_size_;
  702. if (static_cast<int>(size) == total_size_)
  703. Reserve(total_size_ + 1);
  704. auto ptr = &elements()[size];
  705. current_size_ = size + 1;
  706. return ptr;
  707. }
// Appends the range [begin, end). When the iterator category allows the
// distance to be computed up front, a single Reserve + bulk copy is used;
// otherwise elements are appended one at a time through FastAdder.
template<typename Element>
template<typename Iter>
inline void RepeatedField<Element>::Add(Iter begin, Iter end)
{
    // -1 means the size cannot be computed cheaply (e.g. input iterators).
    int reserve = internal::CalculateReserve(begin, end);
    if (reserve != -1)
    {
        if (reserve == 0)
        {
            return;
        }
        Reserve(reserve + size());
        // TODO(ckennelly): The compiler loses track of the buffer freshly
        // allocated by Reserve() by the time we call elements, so it cannot
        // guarantee that elements does not alias [begin(), end()).
        //
        // If restrict is available, annotating the pointer obtained from elements()
        // causes this to lower to memcpy instead of memmove.
        std::copy(begin, end, elements() + size());
        current_size_ = reserve + size();
    }
    else
    {
        FastAdder fast_adder(this);
        for (; begin != end; ++begin)
            fast_adder.Add(*begin);
    }
}
  736. template<typename Element>
  737. inline void RepeatedField<Element>::RemoveLast()
  738. {
  739. GOOGLE_DCHECK_GT(current_size_, 0);
  740. current_size_--;
  741. }
  742. template<typename Element>
  743. void RepeatedField<Element>::ExtractSubrange(int start, int num, Element* elements)
  744. {
  745. GOOGLE_DCHECK_GE(start, 0);
  746. GOOGLE_DCHECK_GE(num, 0);
  747. GOOGLE_DCHECK_LE(start + num, this->current_size_);
  748. // Save the values of the removed elements if requested.
  749. if (elements != nullptr)
  750. {
  751. for (int i = 0; i < num; ++i)
  752. elements[i] = this->Get(i + start);
  753. }
  754. // Slide remaining elements down to fill the gap.
  755. if (num > 0)
  756. {
  757. for (int i = start + num; i < this->current_size_; ++i)
  758. this->Set(i - num, this->Get(i));
  759. this->Truncate(this->current_size_ - num);
  760. }
  761. }
// Logically empties the field. Capacity and constructed elements are kept so
// the storage can be reused without reallocation.
template<typename Element>
inline void RepeatedField<Element>::Clear()
{
    current_size_ = 0;
}
// Appends all of `other`'s elements to this field. Self-merge is disallowed
// (debug-checked) because Reserve() would invalidate the source buffer.
template<typename Element>
inline void RepeatedField<Element>::MergeFrom(const RepeatedField& other)
{
    GOOGLE_DCHECK_NE(&other, this);
    if (other.current_size_ != 0)
    {
        int existing_size = size();
        Reserve(existing_size + other.size());
        // Claim the new slots, then bulk-copy the source values into them.
        AddNAlreadyReserved(other.size());
        CopyArray(Mutable(existing_size), &other.Get(0), other.size());
    }
}
  779. template<typename Element>
  780. inline void RepeatedField<Element>::CopyFrom(const RepeatedField& other)
  781. {
  782. if (&other == this)
  783. return;
  784. Clear();
  785. MergeFrom(other);
  786. }
// Replaces the contents with the range [begin, end).
template<typename Element>
template<typename Iter>
inline void RepeatedField<Element>::Assign(Iter begin, Iter end)
{
    Clear();
    Add(begin, end);
}
// Erases the single element at `position`; returns an iterator to the element
// that now occupies that slot (or end()).
template<typename Element>
inline typename RepeatedField<Element>::iterator RepeatedField<Element>::erase(
    const_iterator position
)
{
    return erase(position, position + 1);
}
  801. template<typename Element>
  802. inline typename RepeatedField<Element>::iterator RepeatedField<Element>::erase(
  803. const_iterator first, const_iterator last
  804. )
  805. {
  806. size_type first_offset = first - cbegin();
  807. if (first != last)
  808. {
  809. Truncate(std::copy(last, cend(), begin() + first_offset) - cbegin());
  810. }
  811. return begin() + first_offset;
  812. }
// Raw mutable pointer to the underlying array (may be a zero-length view when
// the field is empty).
template<typename Element>
inline Element* RepeatedField<Element>::mutable_data()
{
    return unsafe_elements();
}
// Raw read-only pointer to the underlying array (may be a zero-length view
// when the field is empty).
template<typename Element>
inline const Element* RepeatedField<Element>::data() const
{
    return unsafe_elements();
}
// Swaps the internal state of two fields by byte-swapping the contiguous span
// of members from current_size_ through arena_or_elements_ in one shot.
// Requires both fields to share ownership semantics (same arena) — callers
// like Swap()/UnsafeArenaSwap() guarantee that.
template<typename Element>
inline void RepeatedField<Element>::InternalSwap(RepeatedField* other)
{
    GOOGLE_DCHECK(this != other);
    // Swap all fields at once.
    static_assert(std::is_standard_layout<RepeatedField<Element>>::value, "offsetof() requires standard layout before c++17");
    internal::memswap<offsetof(RepeatedField, arena_or_elements_) + sizeof(this->arena_or_elements_) - offsetof(RepeatedField, current_size_)>(
        reinterpret_cast<char*>(this) + offsetof(RepeatedField, current_size_),
        reinterpret_cast<char*>(other) + offsetof(RepeatedField, current_size_)
    );
}
// Swaps contents with `other`. Cheap pointer swap when both live on the same
// arena (or both on the heap); otherwise falls back to copying through a
// temporary allocated on other's arena.
template<typename Element>
void RepeatedField<Element>::Swap(RepeatedField* other)
{
    if (this == other)
        return;
#ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    // Under forced-copy builds, only same-non-null-arena swaps stay cheap;
    // heap<->heap swaps take the copy path below to surface lifetime bugs.
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena())
    {
#else // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena())
    {
#endif // !PROTOBUF_FORCE_COPY_IN_SWAP
        InternalSwap(other);
    }
    else
    {
        // Different arenas: exchange values via a temporary on other's arena.
        RepeatedField<Element> temp(other->GetOwningArena());
        temp.MergeFrom(*this);
        CopyFrom(*other);
        other->UnsafeArenaSwap(&temp);
    }
}
  857. template<typename Element>
  858. void RepeatedField<Element>::UnsafeArenaSwap(RepeatedField* other)
  859. {
  860. if (this == other)
  861. return;
  862. GOOGLE_DCHECK_EQ(GetOwningArena(), other->GetOwningArena());
  863. InternalSwap(other);
  864. }
  865. template<typename Element>
  866. void RepeatedField<Element>::SwapElements(int index1, int index2)
  867. {
  868. using std::swap; // enable ADL with fallback
  869. swap(elements()[index1], elements()[index2]);
  870. }
// Iterator to the first element (valid even when empty: zero-length view).
template<typename Element>
inline typename RepeatedField<Element>::iterator
RepeatedField<Element>::begin()
{
    return iterator(unsafe_elements());
}
// Const iterator to the first element.
template<typename Element>
inline typename RepeatedField<Element>::const_iterator
RepeatedField<Element>::begin() const
{
    return const_iterator(unsafe_elements());
}
// Const iterator to the first element (explicit const form).
template<typename Element>
inline typename RepeatedField<Element>::const_iterator
RepeatedField<Element>::cbegin() const
{
    return const_iterator(unsafe_elements());
}
// Iterator one past the last element.
template<typename Element>
inline typename RepeatedField<Element>::iterator RepeatedField<Element>::end()
{
    return iterator(unsafe_elements() + current_size_);
}
// Const iterator one past the last element.
template<typename Element>
inline typename RepeatedField<Element>::const_iterator
RepeatedField<Element>::end() const
{
    return const_iterator(unsafe_elements() + current_size_);
}
// Const iterator one past the last element (explicit const form).
template<typename Element>
inline typename RepeatedField<Element>::const_iterator
RepeatedField<Element>::cend() const
{
    return const_iterator(unsafe_elements() + current_size_);
}
  906. template<typename Element>
  907. inline size_t RepeatedField<Element>::SpaceUsedExcludingSelfLong() const
  908. {
  909. return total_size_ > 0 ? (total_size_ * sizeof(Element) + kRepHeaderSize) : 0;
  910. }
  911. namespace internal
  912. {
  913. // Returns the new size for a reserved field based on its 'total_size' and the
  914. // requested 'new_size'. The result is clamped to the closed interval:
  915. // [internal::kMinRepeatedFieldAllocationSize,
  916. // std::numeric_limits<int>::max()]
  917. // Requires:
  918. // new_size > total_size &&
  919. // (total_size == 0 ||
  920. // total_size >= kRepeatedFieldLowerClampLimit)
// Computes the capacity to actually allocate for a Reserve(new_size) request:
// at least the lower clamp limit, roughly double the current byte footprint
// (header included) to keep allocations near powers of two, and clamped to
// INT_MAX when doubling would overflow.
template<typename T, int kRepHeaderSize>
inline int CalculateReserveSize(int total_size, int new_size)
{
    constexpr int lower_limit = RepeatedFieldLowerClampLimit<T, kRepHeaderSize>();
    if (new_size < lower_limit)
    {
        // Clamp to smallest allowed size.
        return lower_limit;
    }
    constexpr int kMaxSizeBeforeClamp =
        (std::numeric_limits<int>::max() - kRepHeaderSize) / 2;
    if (PROTOBUF_PREDICT_FALSE(total_size > kMaxSizeBeforeClamp))
    {
        // Doubling would overflow int; saturate instead.
        return std::numeric_limits<int>::max();
    }
    // We want to double the number of bytes, not the number of elements, to try
    // to stay within power-of-two allocations.
    // The allocation has kRepHeaderSize + sizeof(T) * capacity.
    int doubled_size = 2 * total_size + kRepHeaderSize / sizeof(T);
    return std::max(doubled_size, new_size);
}
  942. } // namespace internal
// Avoid inlining of Reserve(): new, copy, and delete[] lead to a significant
// amount of code bloat.
//
// Grows capacity to at least new_size: allocates a fresh Rep (heap or arena),
// default-constructs all slots, moves the live prefix over, then destroys and
// frees the old block. No-op when capacity already suffices.
template<typename Element>
void RepeatedField<Element>::Reserve(int new_size)
{
    if (total_size_ >= new_size)
        return;
    Rep* old_rep = total_size_ > 0 ? rep() : nullptr;
    Rep* new_rep;
    Arena* arena = GetOwningArena();
    // Round the request up per the growth policy (doubling, clamped).
    new_size = internal::CalculateReserveSize<Element, kRepHeaderSize>(
        total_size_, new_size
    );
    GOOGLE_DCHECK_LE(
        static_cast<size_t>(new_size),
        (std::numeric_limits<size_t>::max() - kRepHeaderSize) / sizeof(Element)
    )
        << "Requested size is too large to fit into size_t.";
    size_t bytes =
        kRepHeaderSize + sizeof(Element) * static_cast<size_t>(new_size);
    if (arena == nullptr)
    {
        // Heap path: raw storage; elements are placement-constructed below.
        new_rep = static_cast<Rep*>(::operator new(bytes));
    }
    else
    {
        new_rep = reinterpret_cast<Rep*>(Arena::CreateArray<char>(arena, bytes));
    }
    new_rep->arena = arena;
    int old_total_size = total_size_;
    // Already known: new_size >= internal::kMinRepeatedFieldAllocationSize
    // Maintain invariant:
    // total_size_ == 0 ||
    // total_size_ >= internal::kMinRepeatedFieldAllocationSize
    total_size_ = new_size;
    arena_or_elements_ = new_rep->elements();
    // Invoke placement-new on newly allocated elements. We shouldn't have to do
    // this, since Element is supposed to be POD, but a previous version of this
    // code allocated storage with "new Element[size]" and some code uses
    // RepeatedField with non-POD types, relying on constructor invocation. If
    // Element has a trivial constructor (e.g., int32_t), gcc (tested with -O2)
    // completely removes this loop because the loop body is empty, so this has no
    // effect unless its side-effects are required for correctness.
    // Note that we do this before MoveArray() below because Element's copy
    // assignment implementation will want an initialized instance first.
    Element* e = &elements()[0];
    Element* limit = e + total_size_;
    for (; e < limit; e++)
    {
        new (e) Element;
    }
    if (current_size_ > 0)
    {
        MoveArray(&elements()[0], old_rep->elements(), current_size_);
    }
    // Likewise, we need to invoke destructors on the old array.
    InternalDeallocate(old_rep, old_total_size, false);
}
  1001. template<typename Element>
  1002. inline void RepeatedField<Element>::Truncate(int new_size)
  1003. {
  1004. GOOGLE_DCHECK_LE(new_size, current_size_);
  1005. if (current_size_ > 0)
  1006. {
  1007. current_size_ = new_size;
  1008. }
  1009. }
// "Moves" array_size elements from `from` to `to`. Implemented as a plain
// copy — the source is destroyed separately by the caller (see Reserve()).
template<typename Element>
inline void RepeatedField<Element>::MoveArray(Element* to, Element* from, int array_size)
{
    CopyArray(to, from, array_size);
}
// Copies array_size elements from `from` to `to`, dispatching to a memcpy
// fast path for trivially-copyable Element via ElementCopier.
template<typename Element>
inline void RepeatedField<Element>::CopyArray(Element* to, const Element* from, int array_size)
{
    internal::ElementCopier<Element>()(to, from, array_size);
}
  1020. namespace internal
  1021. {
// Generic fallback: element-by-element copy assignment for types without a
// trivial copy.
template<typename Element, bool HasTrivialCopy>
void ElementCopier<Element, HasTrivialCopy>::operator()(Element* to, const Element* from, int array_size)
{
    std::copy(from, from + array_size, to);
}
// Specialization for trivially-copyable elements: a single memcpy.
// Precondition (from callers): the ranges do not overlap.
template<typename Element>
struct ElementCopier<Element, true>
{
    void operator()(Element* to, const Element* from, int array_size)
    {
        memcpy(to, from, static_cast<size_t>(array_size) * sizeof(Element));
    }
};
  1035. } // namespace internal
  1036. // -------------------------------------------------------------------
  1037. // Iterators and helper functions that follow the spirit of the STL
  1038. // std::back_insert_iterator and std::back_inserter but are tailor-made
  1039. // for RepeatedField and RepeatedPtrField. Typical usage would be:
  1040. //
  1041. // std::copy(some_sequence.begin(), some_sequence.end(),
  1042. // RepeatedFieldBackInserter(proto.mutable_sequence()));
  1043. //
  1044. // Ported by johannes from util/gtl/proto-array-iterators.h
  1045. namespace internal
  1046. {
  1047. // STL-like iterator implementation for RepeatedField. You should not
  1048. // refer to this class directly; use RepeatedField<T>::iterator instead.
  1049. //
  1050. // Note: All of the iterator operators *must* be inlined to avoid performance
  1051. // regressions. This is caused by the extern template declarations below (which
  1052. // are required because of the RepeatedField extern template declarations). If
  1053. // any of these functions aren't explicitly inlined (e.g. defined in the class),
  1054. // the compiler isn't allowed to inline them.
// Random-access iterator over a RepeatedField's contiguous element array.
// It is a thin wrapper around a raw Element*; all operators must stay inline
// (see the note above the class in the surrounding file).
template<typename Element>
class RepeatedIterator
{
public:
    using iterator_category = std::random_access_iterator_tag;
    // Note: remove_const is necessary for std::partial_sum, which uses value_type
    // to determine the summation variable type.
    using value_type = typename std::remove_const<Element>::type;
    using difference_type = std::ptrdiff_t;
    using pointer = Element*;
    using reference = Element&;
    // Default-constructed iterator: null, singular except for comparison.
    constexpr RepeatedIterator() noexcept :
        it_(nullptr)
    {
    }
    // Allows "upcasting" from RepeatedIterator<T**> to
    // RepeatedIterator<const T*const*>.
    template<typename OtherElement, typename std::enable_if<std::is_convertible<OtherElement*, pointer>::value>::type* = nullptr>
    constexpr RepeatedIterator(
        const RepeatedIterator<OtherElement>& other
    ) noexcept
        :
        it_(other.it_)
    {
    }
    // dereferenceable
    constexpr reference operator*() const noexcept
    {
        return *it_;
    }
    constexpr pointer operator->() const noexcept
    {
        return it_;
    }
private:
    // Helper alias to hide the internal type.
    using iterator = RepeatedIterator<Element>;
public:
    // {inc,dec}rementable
    iterator& operator++() noexcept
    {
        ++it_;
        return *this;
    }
    iterator operator++(int) noexcept
    {
        return iterator(it_++);
    }
    iterator& operator--() noexcept
    {
        --it_;
        return *this;
    }
    iterator operator--(int) noexcept
    {
        return iterator(it_--);
    }
    // equality_comparable
    friend constexpr bool operator==(const iterator& x, const iterator& y) noexcept
    {
        return x.it_ == y.it_;
    }
    friend constexpr bool operator!=(const iterator& x, const iterator& y) noexcept
    {
        return x.it_ != y.it_;
    }
    // less_than_comparable
    friend constexpr bool operator<(const iterator& x, const iterator& y) noexcept
    {
        return x.it_ < y.it_;
    }
    friend constexpr bool operator<=(const iterator& x, const iterator& y) noexcept
    {
        return x.it_ <= y.it_;
    }
    friend constexpr bool operator>(const iterator& x, const iterator& y) noexcept
    {
        return x.it_ > y.it_;
    }
    friend constexpr bool operator>=(const iterator& x, const iterator& y) noexcept
    {
        return x.it_ >= y.it_;
    }
    // addable, subtractable
    iterator& operator+=(difference_type d) noexcept
    {
        it_ += d;
        return *this;
    }
    constexpr iterator operator+(difference_type d) const noexcept
    {
        return iterator(it_ + d);
    }
    friend constexpr iterator operator+(const difference_type d, iterator it) noexcept
    {
        return it + d;
    }
    iterator& operator-=(difference_type d) noexcept
    {
        it_ -= d;
        return *this;
    }
    iterator constexpr operator-(difference_type d) const noexcept
    {
        return iterator(it_ - d);
    }
    // indexable
    constexpr reference operator[](difference_type d) const noexcept
    {
        return it_[d];
    }
    // random access iterator
    friend constexpr difference_type operator-(iterator it1, iterator it2) noexcept
    {
        return it1.it_ - it2.it_;
    }
private:
    template<typename OtherElement>
    friend class RepeatedIterator;
    // Allow construction from RepeatedField.
    friend class RepeatedField<value_type>;
    // Private: only RepeatedField may wrap a raw pointer into an iterator.
    explicit RepeatedIterator(Element* it) noexcept :
        it_(it)
    {
    }
    // The internal iterator.
    Element* it_;
};
  1183. // A back inserter for RepeatedField objects.
  1184. template<typename T>
  1185. class RepeatedFieldBackInsertIterator
  1186. {
  1187. public:
  1188. using iterator_category = std::output_iterator_tag;
  1189. using value_type = T;
  1190. using pointer = void;
  1191. using reference = void;
  1192. using difference_type = std::ptrdiff_t;
  1193. explicit RepeatedFieldBackInsertIterator(
  1194. RepeatedField<T>* const mutable_field
  1195. ) :
  1196. field_(mutable_field)
  1197. {
  1198. }
  1199. RepeatedFieldBackInsertIterator<T>& operator=(const T& value)
  1200. {
  1201. field_->Add(value);
  1202. return *this;
  1203. }
  1204. RepeatedFieldBackInsertIterator<T>& operator*()
  1205. {
  1206. return *this;
  1207. }
  1208. RepeatedFieldBackInsertIterator<T>& operator++()
  1209. {
  1210. return *this;
  1211. }
  1212. RepeatedFieldBackInsertIterator<T>& operator++(int /* unused */)
  1213. {
  1214. return *this;
  1215. }
  1216. private:
  1217. RepeatedField<T>* field_;
  1218. };
  1219. } // namespace internal
  1220. // Provides a back insert iterator for RepeatedField instances,
  1221. // similar to std::back_inserter().
// Convenience factory, analogous to std::back_inserter(): returns a back
// insert iterator appending into `mutable_field`.
template<typename T>
internal::RepeatedFieldBackInsertIterator<T> RepeatedFieldBackInserter(
    RepeatedField<T>* const mutable_field
)
{
    return internal::RepeatedFieldBackInsertIterator<T>(mutable_field);
}
  1229. // Extern declarations of common instantiations to reduce library bloat.
  1230. extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<bool>;
  1231. extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<int32_t>;
  1232. extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<uint32_t>;
  1233. extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<int64_t>;
  1234. extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<uint64_t>;
  1235. extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<float>;
  1236. extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<double>;
  1237. namespace internal
  1238. {
  1239. extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedIterator<bool>;
  1240. extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE
  1241. RepeatedIterator<int32_t>;
  1242. extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE
  1243. RepeatedIterator<uint32_t>;
  1244. extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE
  1245. RepeatedIterator<int64_t>;
  1246. extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE
  1247. RepeatedIterator<uint64_t>;
  1248. extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedIterator<float>;
  1249. extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedIterator<double>;
  1250. } // namespace internal
  1251. } // namespace protobuf
  1252. } // namespace google
  1253. #include <google/protobuf/port_undef.inc>
  1254. #endif // GOOGLE_PROTOBUF_REPEATED_FIELD_H__