// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc.  All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// Author: kenton@google.com (Kenton Varda)
//  Based on original Protocol Buffers design by
//  Sanjay Ghemawat, Jeff Dean, and others.
//
// RepeatedField and RepeatedPtrField are used by generated protocol message
// classes to manipulate repeated fields.  These classes are very similar to
// STL's vector, but include a number of optimizations found to be useful
// specifically in the case of Protocol Buffers.  RepeatedPtrField is
// particularly different from STL vector as it manages ownership of the
// pointers that it contains.
//
// This header covers RepeatedField.

#ifndef GOOGLE_PROTOBUF_REPEATED_FIELD_H__
#define GOOGLE_PROTOBUF_REPEATED_FIELD_H__

#include <algorithm>
#include <iterator>
#include <limits>
#include <string>
#include <type_traits>
#include <utility>

#include <google/protobuf/stubs/logging.h>
#include <google/protobuf/stubs/common.h>
#include <google/protobuf/arena.h>
#include <google/protobuf/port.h>
#include <google/protobuf/message_lite.h>
#include <google/protobuf/repeated_ptr_field.h>

// Must be included last.
#include <google/protobuf/port_def.inc>

#ifdef SWIG
#error "You cannot SWIG proto headers"
#endif

namespace google {
namespace protobuf {

class Message;

namespace internal {

template <typename T, int kRepHeaderSize>
constexpr int RepeatedFieldLowerClampLimit() {
  // The header is padded to be at least `sizeof(T)` when it would be smaller
  // otherwise.
  static_assert(sizeof(T) <= kRepHeaderSize, "");
  // We want to pad the minimum size to be a power of two bytes, including the
  // header.
  // The first allocation is kRepHeaderSize bytes worth of elements for a total
  // of 2*kRepHeaderSize bytes.
  // For an 8-byte header, we allocate 8 bool, 2 ints, or 1 int64.
  return kRepHeaderSize / sizeof(T);
}

// kRepeatedFieldUpperClampLimit is the lowest signed integer value that
// overflows when multiplied by 2 (which is undefined behavior). Sizes above
// this will clamp to the maximum int value instead of following exponential
// growth when growing a repeated field.
constexpr int kRepeatedFieldUpperClampLimit =
    (std::numeric_limits<int>::max() / 2) + 1;

template <typename Iter>
inline int CalculateReserve(Iter begin, Iter end, std::forward_iterator_tag) {
  return static_cast<int>(std::distance(begin, end));
}

template <typename Iter>
inline int CalculateReserve(Iter /*begin*/, Iter /*end*/,
                            std::input_iterator_tag /*unused*/) {
  return -1;
}

template <typename Iter>
inline int CalculateReserve(Iter begin, Iter end) {
  typedef typename std::iterator_traits<Iter>::iterator_category Category;
  return CalculateReserve(begin, end, Category());
}

// Swaps two blocks of memory of size sizeof(T).
template <typename T>
inline void SwapBlock(char* p, char* q) {
  T tmp;
  memcpy(&tmp, p, sizeof(T));
  memcpy(p, q, sizeof(T));
  memcpy(q, &tmp, sizeof(T));
}

// Swaps two blocks of memory of size kSize:
//   template <int kSize> void memswap(char* p, char* q);
template <int kSize>
inline typename std::enable_if<(kSize == 0), void>::type memswap(char*, char*) {
}

#define PROTO_MEMSWAP_DEF_SIZE(reg_type, max_size)                           \
  template <int kSize>                                                       \
  typename std::enable_if<(kSize >= sizeof(reg_type) && kSize < (max_size)), \
                          void>::type                                        \
  memswap(char* p, char* q) {                                                \
    SwapBlock<reg_type>(p, q);                                               \
    memswap<kSize - sizeof(reg_type)>(p + sizeof(reg_type),                  \
                                      q + sizeof(reg_type));                 \
  }

PROTO_MEMSWAP_DEF_SIZE(uint8_t, 2)
PROTO_MEMSWAP_DEF_SIZE(uint16_t, 4)
PROTO_MEMSWAP_DEF_SIZE(uint32_t, 8)

#ifdef __SIZEOF_INT128__
PROTO_MEMSWAP_DEF_SIZE(uint64_t, 16)
PROTO_MEMSWAP_DEF_SIZE(__uint128_t, (1u << 31))
#else
PROTO_MEMSWAP_DEF_SIZE(uint64_t, (1u << 31))
#endif

#undef PROTO_MEMSWAP_DEF_SIZE
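
// As an illustrative sketch of how the overloads above compose (not part of
// the API): swapping two 12-byte blocks instantiates memswap<12>, which swaps
// one uint64_t, recurses into memswap<4> for a uint32_t swap, and terminates
// at memswap<0>.
//
//   char a[12], b[12];
//   memswap<sizeof(a)>(a, b);  // uint64_t swap + uint32_t swap + memswap<0>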

template <typename Element>
class RepeatedIterator;

}  // namespace internal

// RepeatedField is used to represent repeated fields of a primitive type (in
// other words, everything except strings and nested Messages). Most users will
// not ever use a RepeatedField directly; they will use the get-by-index,
// set-by-index, and add accessors that are generated for all repeated fields.
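//
// A brief usage sketch (illustrative only; the values and element type shown
// here are assumptions, not part of this header):
//
//   RepeatedField<int32_t> field;
//   field.Add(1);
//   field.Add(2);
//   field.Set(0, 42);          // field is now {42, 2}
//   for (int32_t v : field) {  // STL-style iteration is supported.
//     // ...
//   }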
template <typename Element>
class RepeatedField final {
  static_assert(
      alignof(Arena) >= alignof(Element),
      "We only support types that have an alignment smaller than Arena");

 public:
  constexpr RepeatedField();
  explicit RepeatedField(Arena* arena);

  RepeatedField(const RepeatedField& other);

  template <typename Iter,
            typename = typename std::enable_if<std::is_constructible<
                Element, decltype(*std::declval<Iter>())>::value>::type>
  RepeatedField(Iter begin, Iter end);

  ~RepeatedField();

  RepeatedField& operator=(const RepeatedField& other);

  RepeatedField(RepeatedField&& other) noexcept;
  RepeatedField& operator=(RepeatedField&& other) noexcept;

  bool empty() const;
  int size() const;

  const Element& Get(int index) const;
  Element* Mutable(int index);

  const Element& operator[](int index) const { return Get(index); }
  Element& operator[](int index) { return *Mutable(index); }

  const Element& at(int index) const;
  Element& at(int index);

  void Set(int index, const Element& value);
  void Add(const Element& value);
  // Appends a new element and returns a pointer to it.
  // The new element is uninitialized if |Element| is a POD type.
  Element* Add();
  // Appends elements in the range [begin, end) after reserving
  // the appropriate number of elements.
  template <typename Iter>
  void Add(Iter begin, Iter end);

  // Removes the last element in the array.
  void RemoveLast();

  // Extracts elements with indices in "[start .. start+num-1]".
  // Copies them into "elements[0 .. num-1]" if "elements" is not nullptr.
  // Caution: also moves elements with indices [start+num ..].
  // Calling this routine inside a loop can cause quadratic behavior.
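  //
  // For example (an illustrative sketch, not additional API): if the field
  // holds {10, 20, 30, 40}, then ExtractSubrange(1, 2, out) copies {20, 30}
  // into out[0..1] (when out is not nullptr) and leaves the field as {10, 40}.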
  void ExtractSubrange(int start, int num, Element* elements);

  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear();
  void MergeFrom(const RepeatedField& other);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void CopyFrom(const RepeatedField& other);

  // Replaces the contents with RepeatedField(begin, end).
  template <typename Iter>
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Assign(Iter begin, Iter end);

  // Reserves space to expand the field to at least the given size.  If the
  // array is grown, it will always be at least doubled in size.
  void Reserve(int new_size);

  // Resizes the RepeatedField to a new, smaller size.  This is O(1).
  void Truncate(int new_size);

  void AddAlreadyReserved(const Element& value);
  // Appends a new element and returns a pointer to it.
  // The new element is uninitialized if |Element| is a POD type.
  // Should be called only if Capacity() > size().
  Element* AddAlreadyReserved();
  Element* AddNAlreadyReserved(int elements);
  int Capacity() const;

  // Like STL resize.  Uses value to fill appended elements.
  // Like Truncate() if new_size <= size(), otherwise this is
  // O(new_size - size()).
  void Resize(int new_size, const Element& value);

  // Gets the underlying array.  This pointer is possibly invalidated by
  // any add or remove operation.
  Element* mutable_data();
  const Element* data() const;

  // Swaps entire contents with "other". If they are on separate arenas, then
  // this copies data between the two arenas.
  void Swap(RepeatedField* other);

  // Swaps entire contents with "other". Should be called only if the caller
  // can guarantee that both repeated fields are on the same arena or are on
  // the heap. Swapping between different arenas is disallowed and caught by a
  // GOOGLE_DCHECK (see API docs for details).
  void UnsafeArenaSwap(RepeatedField* other);
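  //
  // An illustrative sketch of the distinction between the two methods above
  // (assuming "a" and "b" point at existing repeated fields):
  //
  //   a->Swap(b);             // Always safe; copies if the arenas differ.
  //   a->UnsafeArenaSwap(b);  // Pointer-level swap only; requires that both
  //                           // fields share an arena or both live on the
  //                           // heap.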

  // Swaps two elements.
  void SwapElements(int index1, int index2);

  // STL-like iterator support
  typedef internal::RepeatedIterator<Element> iterator;
  typedef internal::RepeatedIterator<const Element> const_iterator;
  typedef Element value_type;
  typedef value_type& reference;
  typedef const value_type& const_reference;
  typedef value_type* pointer;
  typedef const value_type* const_pointer;
  typedef int size_type;
  typedef ptrdiff_t difference_type;

  iterator begin();
  const_iterator begin() const;
  const_iterator cbegin() const;
  iterator end();
  const_iterator end() const;
  const_iterator cend() const;

  // Reverse iterator support
  typedef std::reverse_iterator<const_iterator> const_reverse_iterator;
  typedef std::reverse_iterator<iterator> reverse_iterator;
  reverse_iterator rbegin() { return reverse_iterator(end()); }
  const_reverse_iterator rbegin() const {
    return const_reverse_iterator(end());
  }
  reverse_iterator rend() { return reverse_iterator(begin()); }
  const_reverse_iterator rend() const {
    return const_reverse_iterator(begin());
  }

  // Returns the number of bytes used by the repeated field, excluding
  // sizeof(*this).
  size_t SpaceUsedExcludingSelfLong() const;

  int SpaceUsedExcludingSelf() const {
    return internal::ToIntSize(SpaceUsedExcludingSelfLong());
  }

  // Removes the element referenced by position.
  //
  // Returns an iterator to the element immediately following the removed
  // element.
  //
  // Invalidates all iterators at or after the removed element, including
  // end().
  iterator erase(const_iterator position);

  // Removes the elements in the range [first, last).
  //
  // Returns an iterator to the element immediately following the removed
  // range.
  //
  // Invalidates all iterators at or after the removed range, including end().
  iterator erase(const_iterator first, const_iterator last);

  // Gets the Arena on which this RepeatedField stores its elements.
  inline Arena* GetArena() const {
    return GetOwningArena();
  }

  // For internal use only.
  //
  // This is public due to it being called by generated code.
  inline void InternalSwap(RepeatedField* other);

 private:
  template <typename T> friend class Arena::InternalHelper;

  // Gets the Arena on which this RepeatedField stores its elements.
  inline Arena* GetOwningArena() const {
    return (total_size_ == 0) ? static_cast<Arena*>(arena_or_elements_)
                              : rep()->arena;
  }

  static constexpr int kInitialSize = 0;
  // A note on the representation here (see also the comment on
  // RepeatedPtrFieldBase's struct Rep in repeated_ptr_field.h):
  //
  // We maintain the same sizeof(RepeatedField) as before we added arena
  // support so that we do not degrade performance by bloating memory usage.
  // Directly adding an arena_ element to RepeatedField is quite costly. By
  // using indirection in this way, we keep the same size when the
  // RepeatedField is empty (common case), and add only an 8-byte header to
  // the elements array when non-empty. We make sure to place the size fields
  // directly in the RepeatedField class to avoid costly cache misses due to
  // the indirection.
  int current_size_;
  int total_size_;
  // Pad the Rep after the arena pointer to allow for power-of-two byte sizes
  // when sizeof(Element) > sizeof(Arena*), e.g. for 16-byte objects.
  static PROTOBUF_CONSTEXPR const size_t kRepHeaderSize =
      sizeof(Arena*) < sizeof(Element) ? sizeof(Element) : sizeof(Arena*);
  struct Rep {
    Arena* arena;
    Element* elements() {
      return reinterpret_cast<Element*>(reinterpret_cast<char*>(this) +
                                        kRepHeaderSize);
    }
  };

  // If total_size_ == 0 this points to an Arena; otherwise it points to the
  // elements member of a Rep struct. Using this invariant allows the storage
  // of the arena pointer without an extra allocation in the constructor.
  void* arena_or_elements_;

  // Returns a pointer to the elements array.
  // Pre-condition: the array must have been allocated.
  Element* elements() const {
    GOOGLE_DCHECK_GT(total_size_, 0);
    // Because of the above pre-condition this cast is safe.
    return unsafe_elements();
  }

  // Returns a pointer to the elements array if it exists; otherwise either
  // null or an invalid pointer is returned. This only happens for empty
  // repeated fields, where you can't dereference this pointer anyway (it's
  // empty).
  Element* unsafe_elements() const {
    return static_cast<Element*>(arena_or_elements_);
  }

  // Returns a pointer to the Rep struct.
  // Pre-condition: the Rep must have been allocated, i.e. elements() is safe.
  Rep* rep() const {
    return reinterpret_cast<Rep*>(reinterpret_cast<char*>(elements()) -
                                  kRepHeaderSize);
  }

  friend class Arena;
  typedef void InternalArenaConstructable_;

  // Moves the contents of |from| into |to|, possibly clobbering |from| in the
  // process.  For primitive types this is just a memcpy(), but it could be
  // specialized for non-primitive types to, say, swap each element instead.
  void MoveArray(Element* to, Element* from, int size);

  // Copies the elements of |from| into |to|.
  void CopyArray(Element* to, const Element* from, int size);

  // Internal helper to delete all elements and deallocate the storage.
  void InternalDeallocate(Rep* rep, int size, bool in_destructor) {
    if (rep != nullptr) {
      Element* e = &rep->elements()[0];
      if (!std::is_trivial<Element>::value) {
        Element* limit = &rep->elements()[size];
        for (; e < limit; e++) {
          e->~Element();
        }
      }
      const size_t bytes = size * sizeof(*e) + kRepHeaderSize;
      if (rep->arena == nullptr) {
        internal::SizedDelete(rep, bytes);
      } else if (!in_destructor) {
        // If we are in the destructor, we might be being destroyed as part of
        // the arena teardown.  We can't try to return blocks to the arena
        // then.
        rep->arena->ReturnArrayMemory(rep, bytes);
      }
    }
  }

  // This class is a performance wrapper around the
  // RepeatedField::Add(const T&) function. In general, unless a RepeatedField
  // is a local stack variable, LLVM has a hard time optimizing Add. The
  // machine code tends to be
  //
  // loop:
  //   mov %size, dword ptr [%repeated_field]       // load
  //   cmp %size, dword ptr [%repeated_field + 4]
  //   jae fallback
  //   mov %buffer, qword ptr [%repeated_field + 8]
  //   mov dword [%buffer + %size * 4], %value
  //   inc %size                                    // increment
  //   mov dword ptr [%repeated_field], %size       // store
  //   jmp loop
  //
  // This puts a load/store in each iteration of the important loop variable
  // size. It's a pretty bad compile that happens even in simple cases, but
  // largely the presence of the fallback path disturbs the compiler's
  // mem-to-reg analysis.
  //
  // This class takes ownership of a repeated field for the duration of its
  // lifetime. The repeated field should not be accessed during this time,
  // i.e. only access through this class is allowed. This class should always
  // be a function local stack variable. Intended use
  //
  // void AddSequence(const int* begin, const int* end, RepeatedField<int>* out)
  // {
  //   RepeatedFieldAdder<int> adder(out);  // Take ownership of out
  //   for (auto it = begin; it != end; ++it) {
  //     adder.Add(*it);
  //   }
  // }
  //
  // Typically, due to the fact that adder is a local stack variable, the
  // compiler will be successful in mem-to-reg transformation and the machine
  // code will be
  //
  // loop:
  //   cmp %size, %capacity
  //   jae fallback
  //   mov dword ptr [%buffer + %size * 4], %val
  //   inc %size
  //   jmp loop
  //
  // The first version executes at 7 cycles per iteration while the second
  // version executes at only 1 or 2 cycles.
  template <int = 0, bool = std::is_trivial<Element>::value>
  class FastAdderImpl {
   public:
    explicit FastAdderImpl(RepeatedField* rf) : repeated_field_(rf) {
      index_ = repeated_field_->current_size_;
      capacity_ = repeated_field_->total_size_;
      buffer_ = repeated_field_->unsafe_elements();
    }
    ~FastAdderImpl() { repeated_field_->current_size_ = index_; }

    void Add(Element val) {
      if (index_ == capacity_) {
        repeated_field_->current_size_ = index_;
        repeated_field_->Reserve(index_ + 1);
        capacity_ = repeated_field_->total_size_;
        buffer_ = repeated_field_->unsafe_elements();
      }
      buffer_[index_++] = val;
    }

   private:
    RepeatedField* repeated_field_;
    int index_;
    int capacity_;
    Element* buffer_;

    GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(FastAdderImpl);
  };

  // FastAdder is a wrapper for adding fields. The specialization above handles
  // POD types more efficiently than RepeatedField.
  template <int I>
  class FastAdderImpl<I, false> {
   public:
    explicit FastAdderImpl(RepeatedField* rf) : repeated_field_(rf) {}
    void Add(const Element& val) { repeated_field_->Add(val); }

   private:
    RepeatedField* repeated_field_;
    GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(FastAdderImpl);
  };

  using FastAdder = FastAdderImpl<>;

  friend class TestRepeatedFieldHelper;
  friend class ::google::protobuf::internal::ParseContext;
};

namespace internal {

// This is a helper template to copy an array of elements efficiently when
// they have a trivial copy constructor, and correctly otherwise. This really
// shouldn't be necessary, but our compiler doesn't optimize std::copy very
// effectively.
template <typename Element,
          bool HasTrivialCopy = std::is_trivial<Element>::value>
struct ElementCopier {
  void operator()(Element* to, const Element* from, int array_size);
};

}  // namespace internal

// implementation ====================================================

template <typename Element>
constexpr RepeatedField<Element>::RepeatedField()
    : current_size_(0), total_size_(0), arena_or_elements_(nullptr) {}

template <typename Element>
inline RepeatedField<Element>::RepeatedField(Arena* arena)
    : current_size_(0), total_size_(0), arena_or_elements_(arena) {}

template <typename Element>
inline RepeatedField<Element>::RepeatedField(const RepeatedField& other)
    : current_size_(0), total_size_(0), arena_or_elements_(nullptr) {
  if (other.current_size_ != 0) {
    Reserve(other.size());
    AddNAlreadyReserved(other.size());
    CopyArray(Mutable(0), &other.Get(0), other.size());
  }
}

template <typename Element>
template <typename Iter, typename>
RepeatedField<Element>::RepeatedField(Iter begin, Iter end)
    : current_size_(0), total_size_(0), arena_or_elements_(nullptr) {
  Add(begin, end);
}

template <typename Element>
RepeatedField<Element>::~RepeatedField() {
#ifndef NDEBUG
  // Try to trigger segfault / asan failure in non-opt builds if arena_
  // lifetime has ended before the destructor.
  auto arena = GetOwningArena();
  if (arena) (void)arena->SpaceAllocated();
#endif
  if (total_size_ > 0) {
    InternalDeallocate(rep(), total_size_, true);
  }
}

template <typename Element>
inline RepeatedField<Element>& RepeatedField<Element>::operator=(
    const RepeatedField& other) {
  if (this != &other) CopyFrom(other);
  return *this;
}

template <typename Element>
inline RepeatedField<Element>::RepeatedField(RepeatedField&& other) noexcept
    : RepeatedField() {
#ifdef PROTOBUF_FORCE_COPY_IN_MOVE
  CopyFrom(other);
#else   // PROTOBUF_FORCE_COPY_IN_MOVE
  // We don't just call Swap(&other) here because it would perform 3 copies if
  // other is on an arena. This field can't be on an arena because arena
  // construction always uses the Arena* accepting constructor.
  if (other.GetOwningArena()) {
    CopyFrom(other);
  } else {
    InternalSwap(&other);
  }
#endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
}

template <typename Element>
inline RepeatedField<Element>& RepeatedField<Element>::operator=(
    RepeatedField&& other) noexcept {
  // We don't just call Swap(&other) here because it would perform 3 copies if
  // the two fields are on different arenas.
  if (this != &other) {
    if (GetOwningArena() != other.GetOwningArena()
#ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        || GetOwningArena() == nullptr
#endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      CopyFrom(other);
    } else {
      InternalSwap(&other);
    }
  }
  return *this;
}

template <typename Element>
inline bool RepeatedField<Element>::empty() const {
  return current_size_ == 0;
}

template <typename Element>
inline int RepeatedField<Element>::size() const {
  return current_size_;
}

template <typename Element>
inline int RepeatedField<Element>::Capacity() const {
  return total_size_;
}

template <typename Element>
inline void RepeatedField<Element>::AddAlreadyReserved(const Element& value) {
  GOOGLE_DCHECK_LT(current_size_, total_size_);
  elements()[current_size_++] = value;
}

template <typename Element>
inline Element* RepeatedField<Element>::AddAlreadyReserved() {
  GOOGLE_DCHECK_LT(current_size_, total_size_);
  return &elements()[current_size_++];
}

template <typename Element>
inline Element* RepeatedField<Element>::AddNAlreadyReserved(int elements) {
  GOOGLE_DCHECK_GE(total_size_ - current_size_, elements)
      << total_size_ << ", " << current_size_;
  // Warning: sometimes people call this when elements == 0 and
  // total_size_ == 0. In this case the return pointer points to a zero size
  // array (n == 0). Hence we can just use unsafe_elements(), because the user
  // cannot dereference the pointer anyway.
  Element* ret = unsafe_elements() + current_size_;
  current_size_ += elements;
  return ret;
}

template <typename Element>
inline void RepeatedField<Element>::Resize(int new_size, const Element& value) {
  GOOGLE_DCHECK_GE(new_size, 0);
  if (new_size > current_size_) {
    Reserve(new_size);
    std::fill(&elements()[current_size_], &elements()[new_size], value);
  }
  current_size_ = new_size;
}

template <typename Element>
inline const Element& RepeatedField<Element>::Get(int index) const {
  GOOGLE_DCHECK_GE(index, 0);
  GOOGLE_DCHECK_LT(index, current_size_);
  return elements()[index];
}

template <typename Element>
inline const Element& RepeatedField<Element>::at(int index) const {
  GOOGLE_CHECK_GE(index, 0);
  GOOGLE_CHECK_LT(index, current_size_);
  return elements()[index];
}

template <typename Element>
inline Element& RepeatedField<Element>::at(int index) {
  GOOGLE_CHECK_GE(index, 0);
  GOOGLE_CHECK_LT(index, current_size_);
  return elements()[index];
}

template <typename Element>
inline Element* RepeatedField<Element>::Mutable(int index) {
  GOOGLE_DCHECK_GE(index, 0);
  GOOGLE_DCHECK_LT(index, current_size_);
  return &elements()[index];
}

template <typename Element>
inline void RepeatedField<Element>::Set(int index, const Element& value) {
  GOOGLE_DCHECK_GE(index, 0);
  GOOGLE_DCHECK_LT(index, current_size_);
  elements()[index] = value;
}

template <typename Element>
inline void RepeatedField<Element>::Add(const Element& value) {
  uint32_t size = current_size_;
  if (static_cast<int>(size) == total_size_) {
    // value could reference an element of the array. Reserving new space will
    // invalidate the reference. So we must make a copy first.
    auto tmp = value;
    Reserve(total_size_ + 1);
    elements()[size] = std::move(tmp);
  } else {
    elements()[size] = value;
  }
  current_size_ = size + 1;
}

template <typename Element>
inline Element* RepeatedField<Element>::Add() {
  uint32_t size = current_size_;
  if (static_cast<int>(size) == total_size_) Reserve(total_size_ + 1);
  auto ptr = &elements()[size];
  current_size_ = size + 1;
  return ptr;
}

template <typename Element>
template <typename Iter>
inline void RepeatedField<Element>::Add(Iter begin, Iter end) {
  int reserve = internal::CalculateReserve(begin, end);
  if (reserve != -1) {
    if (reserve == 0) {
      return;
    }

    Reserve(reserve + size());
    // TODO(ckennelly): The compiler loses track of the buffer freshly
    // allocated by Reserve() by the time we call elements, so it cannot
    // guarantee that elements does not alias [begin(), end()).
    //
    // If restrict is available, annotating the pointer obtained from
    // elements() causes this to lower to memcpy instead of memmove.
    std::copy(begin, end, elements() + size());
    current_size_ = reserve + size();
  } else {
    FastAdder fast_adder(this);
    for (; begin != end; ++begin) fast_adder.Add(*begin);
  }
}

template <typename Element>
inline void RepeatedField<Element>::RemoveLast() {
  GOOGLE_DCHECK_GT(current_size_, 0);
  current_size_--;
}

template <typename Element>
void RepeatedField<Element>::ExtractSubrange(int start, int num,
                                             Element* elements) {
  GOOGLE_DCHECK_GE(start, 0);
  GOOGLE_DCHECK_GE(num, 0);
  GOOGLE_DCHECK_LE(start + num, this->current_size_);

  // Save the values of the removed elements if requested.
  if (elements != nullptr) {
    for (int i = 0; i < num; ++i) elements[i] = this->Get(i + start);
  }

  // Slide remaining elements down to fill the gap.
  if (num > 0) {
    for (int i = start + num; i < this->current_size_; ++i)
      this->Set(i - num, this->Get(i));
    this->Truncate(this->current_size_ - num);
  }
}

template <typename Element>
inline void RepeatedField<Element>::Clear() {
  current_size_ = 0;
}

template <typename Element>
inline void RepeatedField<Element>::MergeFrom(const RepeatedField& other) {
  GOOGLE_DCHECK_NE(&other, this);
  if (other.current_size_ != 0) {
    int existing_size = size();
    Reserve(existing_size + other.size());
    AddNAlreadyReserved(other.size());
    CopyArray(Mutable(existing_size), &other.Get(0), other.size());
  }
}

template <typename Element>
inline void RepeatedField<Element>::CopyFrom(const RepeatedField& other) {
  if (&other == this) return;
  Clear();
  MergeFrom(other);
}

template <typename Element>
template <typename Iter>
inline void RepeatedField<Element>::Assign(Iter begin, Iter end) {
  Clear();
  Add(begin, end);
}

template <typename Element>
inline typename RepeatedField<Element>::iterator RepeatedField<Element>::erase(
    const_iterator position) {
  return erase(position, position + 1);
}

template <typename Element>
inline typename RepeatedField<Element>::iterator RepeatedField<Element>::erase(
    const_iterator first, const_iterator last) {
  size_type first_offset = first - cbegin();
  if (first != last) {
    Truncate(std::copy(last, cend(), begin() + first_offset) - cbegin());
  }
  return begin() + first_offset;
}

template <typename Element>
inline Element* RepeatedField<Element>::mutable_data() {
  return unsafe_elements();
}

template <typename Element>
inline const Element* RepeatedField<Element>::data() const {
  return unsafe_elements();
}

template <typename Element>
inline void RepeatedField<Element>::InternalSwap(RepeatedField* other) {
  GOOGLE_DCHECK(this != other);

  // Swap all fields at once.
  static_assert(std::is_standard_layout<RepeatedField<Element>>::value,
                "offsetof() requires standard layout before c++17");
  internal::memswap<offsetof(RepeatedField, arena_or_elements_) +
                    sizeof(this->arena_or_elements_) -
                    offsetof(RepeatedField, current_size_)>(
      reinterpret_cast<char*>(this) + offsetof(RepeatedField, current_size_),
      reinterpret_cast<char*>(other) + offsetof(RepeatedField, current_size_));
}

template <typename Element>
void RepeatedField<Element>::Swap(RepeatedField* other) {
  if (this == other) return;
#ifdef PROTOBUF_FORCE_COPY_IN_SWAP
  if (GetOwningArena() != nullptr &&
      GetOwningArena() == other->GetOwningArena()) {
#else   // PROTOBUF_FORCE_COPY_IN_SWAP
  if (GetOwningArena() == other->GetOwningArena()) {
#endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
    InternalSwap(other);
  } else {
    RepeatedField<Element> temp(other->GetOwningArena());
    temp.MergeFrom(*this);
    CopyFrom(*other);
    other->UnsafeArenaSwap(&temp);
  }
}

template <typename Element>
void RepeatedField<Element>::UnsafeArenaSwap(RepeatedField* other) {
  if (this == other) return;
  GOOGLE_DCHECK_EQ(GetOwningArena(), other->GetOwningArena());
  InternalSwap(other);
}

template <typename Element>
void RepeatedField<Element>::SwapElements(int index1, int index2) {
  using std::swap;  // enable ADL with fallback
  swap(elements()[index1], elements()[index2]);
}

template <typename Element>
inline typename RepeatedField<Element>::iterator
RepeatedField<Element>::begin() {
  return iterator(unsafe_elements());
}
template <typename Element>
inline typename RepeatedField<Element>::const_iterator
RepeatedField<Element>::begin() const {
  return const_iterator(unsafe_elements());
}
template <typename Element>
inline typename RepeatedField<Element>::const_iterator
RepeatedField<Element>::cbegin() const {
  return const_iterator(unsafe_elements());
}
template <typename Element>
inline typename RepeatedField<Element>::iterator RepeatedField<Element>::end() {
  return iterator(unsafe_elements() + current_size_);
}
template <typename Element>
inline typename RepeatedField<Element>::const_iterator
RepeatedField<Element>::end() const {
  return const_iterator(unsafe_elements() + current_size_);
}
template <typename Element>
inline typename RepeatedField<Element>::const_iterator
RepeatedField<Element>::cend() const {
  return const_iterator(unsafe_elements() + current_size_);
}

template <typename Element>
inline size_t RepeatedField<Element>::SpaceUsedExcludingSelfLong() const {
  return total_size_ > 0 ? (total_size_ * sizeof(Element) + kRepHeaderSize) : 0;
}

namespace internal {
// Returns the new size for a reserved field based on its 'total_size' and the
// requested 'new_size'. The result is clamped to the closed interval:
//   [RepeatedFieldLowerClampLimit<T, kRepHeaderSize>(),
//    std::numeric_limits<int>::max()]
// Requires:
//   new_size > total_size &&
//   (total_size == 0 ||
//    total_size >= RepeatedFieldLowerClampLimit<T, kRepHeaderSize>())
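//
// As a worked example (illustrative only): for T = int32_t with an 8-byte
// header, the lower clamp limit is 8 / 4 = 2 elements. Growing from
// total_size = 4 with new_size = 5 yields std::max(2 * 4 + 8 / 4, 5) = 10
// elements, i.e. the allocation grows from 8 + 4 * 4 = 24 bytes to
// 8 + 10 * 4 = 48 bytes, doubling the number of bytes rather than the number
// of elements.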
template <typename T, int kRepHeaderSize>
inline int CalculateReserveSize(int total_size, int new_size) {
  constexpr int lower_limit = RepeatedFieldLowerClampLimit<T, kRepHeaderSize>();
  if (new_size < lower_limit) {
    // Clamp to smallest allowed size.
    return lower_limit;
  }
  constexpr int kMaxSizeBeforeClamp =
      (std::numeric_limits<int>::max() - kRepHeaderSize) / 2;
  if (PROTOBUF_PREDICT_FALSE(total_size > kMaxSizeBeforeClamp)) {
    return std::numeric_limits<int>::max();
  }
  // We want to double the number of bytes, not the number of elements, to try
  // to stay within power-of-two allocations.
  // The allocation has kRepHeaderSize + sizeof(T) * capacity.
  int doubled_size = 2 * total_size + kRepHeaderSize / sizeof(T);
  return std::max(doubled_size, new_size);
}
}  // namespace internal

// Avoid inlining of Reserve(): new, copy, and delete[] lead to a significant
// amount of code bloat.
template <typename Element>
void RepeatedField<Element>::Reserve(int new_size) {
  if (total_size_ >= new_size) return;
  Rep* old_rep = total_size_ > 0 ? rep() : nullptr;
  Rep* new_rep;
  Arena* arena = GetOwningArena();

  new_size = internal::CalculateReserveSize<Element, kRepHeaderSize>(
      total_size_, new_size);

  GOOGLE_DCHECK_LE(
      static_cast<size_t>(new_size),
      (std::numeric_limits<size_t>::max() - kRepHeaderSize) / sizeof(Element))
      << "Requested size is too large to fit into size_t.";
  size_t bytes =
      kRepHeaderSize + sizeof(Element) * static_cast<size_t>(new_size);
  if (arena == nullptr) {
    new_rep = static_cast<Rep*>(::operator new(bytes));
  } else {
    new_rep = reinterpret_cast<Rep*>(Arena::CreateArray<char>(arena, bytes));
  }
  new_rep->arena = arena;
  int old_total_size = total_size_;
  // Already known: new_size >= the lower clamp limit
  // (internal::RepeatedFieldLowerClampLimit<Element, kRepHeaderSize>()).
  // Maintain invariant:
  //   total_size_ == 0 ||
  //   total_size_ >= that lower clamp limit.
  total_size_ = new_size;
  arena_or_elements_ = new_rep->elements();
  // Invoke placement-new on newly allocated elements. We shouldn't have to do
  // this, since Element is supposed to be POD, but a previous version of this
  // code allocated storage with "new Element[size]" and some code uses
  // RepeatedField with non-POD types, relying on constructor invocation. If
  // Element has a trivial constructor (e.g., int32_t), gcc (tested with -O2)
  // completely removes this loop because the loop body is empty, so this has
  // no effect unless its side-effects are required for correctness.
  // Note that we do this before MoveArray() below because Element's copy
  // assignment implementation will want an initialized instance first.
  Element* e = &elements()[0];
  Element* limit = e + total_size_;
  for (; e < limit; e++) {
    new (e) Element;
  }
  if (current_size_ > 0) {
    MoveArray(&elements()[0], old_rep->elements(), current_size_);
  }

  // Likewise, we need to invoke destructors on the old array.
  InternalDeallocate(old_rep, old_total_size, false);
}

template <typename Element>
inline void RepeatedField<Element>::Truncate(int new_size) {
  GOOGLE_DCHECK_LE(new_size, current_size_);
  if (current_size_ > 0) {
    current_size_ = new_size;
  }
}

template <typename Element>
inline void RepeatedField<Element>::MoveArray(Element* to, Element* from,
                                              int array_size) {
  CopyArray(to, from, array_size);
}

template <typename Element>
inline void RepeatedField<Element>::CopyArray(Element* to, const Element* from,
                                              int array_size) {
  internal::ElementCopier<Element>()(to, from, array_size);
}

namespace internal {

template <typename Element, bool HasTrivialCopy>
void ElementCopier<Element, HasTrivialCopy>::operator()(Element* to,
                                                        const Element* from,
                                                        int array_size) {
  std::copy(from, from + array_size, to);
}

template <typename Element>
struct ElementCopier<Element, true> {
  void operator()(Element* to, const Element* from, int array_size) {
    memcpy(to, from, static_cast<size_t>(array_size) * sizeof(Element));
  }
};

}  // namespace internal

// -------------------------------------------------------------------

// Iterators and helper functions that follow the spirit of the STL
// std::back_insert_iterator and std::back_inserter but are tailor-made
// for RepeatedField and RepeatedPtrField. Typical usage would be:
//
//   std::copy(some_sequence.begin(), some_sequence.end(),
//             RepeatedFieldBackInserter(proto.mutable_sequence()));
//
// Ported by johannes from util/gtl/proto-array-iterators.h

namespace internal {

// STL-like iterator implementation for RepeatedField.  You should not
// refer to this class directly; use RepeatedField<T>::iterator instead.
//
// Note: All of the iterator operators *must* be inlined to avoid performance
// regressions.  This is caused by the extern template declarations below
// (which are required because of the RepeatedField extern template
// declarations).  If any of these functions aren't explicitly inlined (e.g.
// defined in the class), the compiler isn't allowed to inline them.
template <typename Element>
class RepeatedIterator {
 public:
  using iterator_category = std::random_access_iterator_tag;
  // Note: remove_const is necessary for std::partial_sum, which uses
  // value_type to determine the summation variable type.
  using value_type = typename std::remove_const<Element>::type;
  using difference_type = std::ptrdiff_t;
  using pointer = Element*;
  using reference = Element&;

  constexpr RepeatedIterator() noexcept : it_(nullptr) {}

  // Allows "upcasting" from RepeatedIterator<T> to
  // RepeatedIterator<const T>.
  template <typename OtherElement,
            typename std::enable_if<std::is_convertible<
                OtherElement*, pointer>::value>::type* = nullptr>
  constexpr RepeatedIterator(
      const RepeatedIterator<OtherElement>& other) noexcept
      : it_(other.it_) {}

  // dereferenceable
  constexpr reference operator*() const noexcept { return *it_; }
  constexpr pointer operator->() const noexcept { return it_; }

 private:
  // Helper alias to hide the internal type.
  using iterator = RepeatedIterator<Element>;

 public:
  // {inc,dec}rementable
  iterator& operator++() noexcept {
    ++it_;
    return *this;
  }
  iterator operator++(int) noexcept { return iterator(it_++); }
  iterator& operator--() noexcept {
    --it_;
    return *this;
  }
  iterator operator--(int) noexcept { return iterator(it_--); }

  // equality_comparable
  friend constexpr bool operator==(const iterator& x,
                                   const iterator& y) noexcept {
    return x.it_ == y.it_;
  }
  friend constexpr bool operator!=(const iterator& x,
                                   const iterator& y) noexcept {
    return x.it_ != y.it_;
  }

  // less_than_comparable
  friend constexpr bool operator<(const iterator& x,
                                  const iterator& y) noexcept {
    return x.it_ < y.it_;
  }
  friend constexpr bool operator<=(const iterator& x,
                                   const iterator& y) noexcept {
    return x.it_ <= y.it_;
  }
  friend constexpr bool operator>(const iterator& x,
                                  const iterator& y) noexcept {
    return x.it_ > y.it_;
  }
  friend constexpr bool operator>=(const iterator& x,
                                   const iterator& y) noexcept {
    return x.it_ >= y.it_;
  }

  // addable, subtractable
  iterator& operator+=(difference_type d) noexcept {
    it_ += d;
    return *this;
  }
  constexpr iterator operator+(difference_type d) const noexcept {
    return iterator(it_ + d);
  }
  friend constexpr iterator operator+(const difference_type d,
                                      iterator it) noexcept {
    return it + d;
  }

  iterator& operator-=(difference_type d) noexcept {
    it_ -= d;
    return *this;
  }
  iterator constexpr operator-(difference_type d) const noexcept {
    return iterator(it_ - d);
  }

  // indexable
  constexpr reference operator[](difference_type d) const noexcept {
    return it_[d];
  }

  // random access iterator
  friend constexpr difference_type operator-(iterator it1,
                                             iterator it2) noexcept {
    return it1.it_ - it2.it_;
  }

 private:
  template <typename OtherElement>
  friend class RepeatedIterator;

  // Allow construction from RepeatedField.
  friend class RepeatedField<value_type>;
  explicit RepeatedIterator(Element* it) noexcept : it_(it) {}

  // The internal iterator.
  Element* it_;
};

// A back inserter for RepeatedField objects.
template <typename T>
class RepeatedFieldBackInsertIterator {
 public:
  using iterator_category = std::output_iterator_tag;
  using value_type = T;
  using pointer = void;
  using reference = void;
  using difference_type = std::ptrdiff_t;

  explicit RepeatedFieldBackInsertIterator(
      RepeatedField<T>* const mutable_field)
      : field_(mutable_field) {}
  RepeatedFieldBackInsertIterator<T>& operator=(const T& value) {
    field_->Add(value);
    return *this;
  }
  RepeatedFieldBackInsertIterator<T>& operator*() { return *this; }
  RepeatedFieldBackInsertIterator<T>& operator++() { return *this; }
  RepeatedFieldBackInsertIterator<T>& operator++(int /* unused */) {
    return *this;
  }

 private:
  RepeatedField<T>* field_;
};

}  // namespace internal

// Provides a back insert iterator for RepeatedField instances,
// similar to std::back_inserter().
template <typename T>
internal::RepeatedFieldBackInsertIterator<T> RepeatedFieldBackInserter(
    RepeatedField<T>* const mutable_field) {
  return internal::RepeatedFieldBackInsertIterator<T>(mutable_field);
}

// Extern declarations of common instantiations to reduce library bloat.
extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<bool>;
extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<int32_t>;
extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<uint32_t>;
extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<int64_t>;
extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<uint64_t>;
extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<float>;
extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<double>;

namespace internal {
extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedIterator<bool>;
extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE
    RepeatedIterator<int32_t>;
extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE
    RepeatedIterator<uint32_t>;
extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE
    RepeatedIterator<int64_t>;
extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE
    RepeatedIterator<uint64_t>;
extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedIterator<float>;
extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE
    RepeatedIterator<double>;
}  // namespace internal

}  // namespace protobuf
}  // namespace google

#include <google/protobuf/port_undef.inc>

#endif  // GOOGLE_PROTOBUF_REPEATED_FIELD_H__