// Copyright 2019 The Abseil Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifndef ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_INTERNAL_H_
#define ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_INTERNAL_H_

#include <cstddef>
#include <cstring>
#include <iterator>
#include <memory>
#include <utility>

#include "absl/base/macros.h"
#include "absl/container/internal/compressed_tuple.h"
#include "absl/memory/memory.h"
#include "absl/meta/type_traits.h"
#include "absl/types/span.h"

namespace absl {
namespace inlined_vector_internal {

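// Detects whether `Iterator` satisfies at least the forward iterator
// requirements (i.e., the range it denotes can be traversed more than once).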
template <typename Iterator>
using IsAtLeastForwardIterator = std::is_convertible<
    typename std::iterator_traits<Iterator>::iterator_category,
    std::forward_iterator_tag>;

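// Detects whether the element storage can be copied with `memcpy()`: the
// allocator must be the unspecialized `std::allocator` (so that skipping its
// `construct()`/`destroy()` calls is unobservable) and the value type must be
// trivially copy constructible, trivially copy assignable, and trivially
// destructible.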
template <typename AllocatorType>
using IsMemcpyOk = absl::conjunction<
    std::is_same<std::allocator<typename AllocatorType::value_type>,
                 AllocatorType>,
    absl::is_trivially_copy_constructible<typename AllocatorType::value_type>,
    absl::is_trivially_copy_assignable<typename AllocatorType::value_type>,
    absl::is_trivially_destructible<typename AllocatorType::value_type>>;

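// Destroys the `destroy_size` elements beginning at `destroy_first`, in
// reverse order, using `AllocatorTraits::destroy()` on `*alloc_ptr`. A null
// `destroy_first` is a no-op.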
template <typename AllocatorType, typename ValueType, typename SizeType>
void DestroyElements(AllocatorType* alloc_ptr, ValueType* destroy_first,
                     SizeType destroy_size) {
  using AllocatorTraits = absl::allocator_traits<AllocatorType>;

  if (destroy_first != nullptr) {
    for (auto i = destroy_size; i != 0;) {
      --i;
      AllocatorTraits::destroy(*alloc_ptr, destroy_first + i);
    }

#ifndef NDEBUG
    // Overwrite the destroyed elements' memory with `0xab` so that reads of
    // destroyed elements are easier to catch.
    //
    // Cast to `void*` to tell the compiler that we don't care that we might be
    // scribbling on a vtable pointer.
    auto* memory_ptr = static_cast<void*>(destroy_first);
    auto memory_size = sizeof(ValueType) * destroy_size;
    std::memset(memory_ptr, 0xab, memory_size);
#endif  // !defined(NDEBUG)
  }
}

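// Constructs `construct_size` elements beginning at `construct_first` by
// repeatedly calling `values_ptr->ConstructNext(...)`. If a construction
// throws, the elements constructed so far are destroyed before the exception
// is rethrown, leaving the destination range empty.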
template <typename AllocatorType, typename ValueType, typename ValueAdapter,
          typename SizeType>
void ConstructElements(AllocatorType* alloc_ptr, ValueType* construct_first,
                       ValueAdapter* values_ptr, SizeType construct_size) {
  // If any construction fails, all completed constructions are rolled back.
  for (SizeType i = 0; i < construct_size; ++i) {
    ABSL_INTERNAL_TRY {
      values_ptr->ConstructNext(alloc_ptr, construct_first + i);
    }
    ABSL_INTERNAL_CATCH_ANY {
      inlined_vector_internal::DestroyElements(alloc_ptr, construct_first, i);

      ABSL_INTERNAL_RETHROW;
    }
  }
}

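// Assigns to the `assign_size` existing elements beginning at `assign_first`
// by repeatedly calling `values_ptr->AssignNext(...)`.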
template <typename ValueType, typename ValueAdapter, typename SizeType>
void AssignElements(ValueType* assign_first, ValueAdapter* values_ptr,
                    SizeType assign_size) {
  for (SizeType i = 0; i < assign_size; ++i) {
    values_ptr->AssignNext(assign_first + i);
  }
}

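// A non-owning snapshot of a `Storage` instance: its data pointer, size, and
// capacity at the time the view was taken.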
template <typename AllocatorType>
struct StorageView {
  using pointer = typename AllocatorType::pointer;
  using size_type = typename AllocatorType::size_type;

  pointer data;
  size_type size;
  size_type capacity;
};

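// Value adapter that constructs or assigns each successive element from an
// iterator, advancing the iterator once per element.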
template <typename AllocatorType, typename Iterator>
class IteratorValueAdapter {
  using pointer = typename AllocatorType::pointer;
  using AllocatorTraits = absl::allocator_traits<AllocatorType>;

 public:
  explicit IteratorValueAdapter(const Iterator& it) : it_(it) {}

  void ConstructNext(AllocatorType* alloc_ptr, pointer construct_at) {
    AllocatorTraits::construct(*alloc_ptr, construct_at, *it_);
    ++it_;
  }

  void AssignNext(pointer assign_at) {
    *assign_at = *it_;
    ++it_;
  }

 private:
  Iterator it_;
};

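// Value adapter that constructs or assigns every element as a copy of a
// single value (e.g., for fill-style construction and assignment).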
template <typename AllocatorType>
class CopyValueAdapter {
  using pointer = typename AllocatorType::pointer;
  using const_pointer = typename AllocatorType::const_pointer;
  using const_reference = typename AllocatorType::const_reference;
  using AllocatorTraits = absl::allocator_traits<AllocatorType>;

 public:
  explicit CopyValueAdapter(const_reference v) : ptr_(std::addressof(v)) {}

  void ConstructNext(AllocatorType* alloc_ptr, pointer construct_at) {
    AllocatorTraits::construct(*alloc_ptr, construct_at, *ptr_);
  }

  void AssignNext(pointer assign_at) { *assign_at = *ptr_; }

 private:
  const_pointer ptr_;
};

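// Value adapter that constructs elements with no constructor arguments and
// assigns from a value-initialized temporary, for growth that supplies no
// explicit values (e.g., `resize(n)`).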
template <typename AllocatorType>
class DefaultValueAdapter {
  using pointer = typename AllocatorType::pointer;
  using value_type = typename AllocatorType::value_type;
  using AllocatorTraits = absl::allocator_traits<AllocatorType>;

 public:
  explicit DefaultValueAdapter() {}

  void ConstructNext(AllocatorType* alloc_ptr, pointer construct_at) {
    AllocatorTraits::construct(*alloc_ptr, construct_at);
  }

  void AssignNext(pointer assign_at) { *assign_at = value_type(); }
};

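// RAII helper that owns a single heap allocation until ownership is released.
// `Allocate()` records the new pointer and capacity; unless the pointer is
// reset to null (as `Storage::AcquireAllocation()` does on success), the
// destructor returns the memory to the allocator. An illustrative sketch of
// the pattern used later in this file:
//
//   AllocationTransaction allocation_tx(GetAllocPtr());
//   pointer new_data = allocation_tx.Allocate(new_capacity);
//   // ... construct elements into `new_data`; this may throw ...
//   AcquireAllocation(&allocation_tx);  // Success: keep the allocation.
//
// If construction throws before `AcquireAllocation()` runs, `new_data` is
// deallocated automatically.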
template <typename AllocatorType>
class AllocationTransaction {
  using value_type = typename AllocatorType::value_type;
  using pointer = typename AllocatorType::pointer;
  using size_type = typename AllocatorType::size_type;
  using AllocatorTraits = absl::allocator_traits<AllocatorType>;

 public:
  explicit AllocationTransaction(AllocatorType* alloc_ptr)
      : alloc_data_(*alloc_ptr, nullptr) {}

  AllocationTransaction(const AllocationTransaction&) = delete;
  void operator=(const AllocationTransaction&) = delete;

  AllocatorType& GetAllocator() { return alloc_data_.template get<0>(); }
  pointer& GetData() { return alloc_data_.template get<1>(); }
  size_type& GetCapacity() { return capacity_; }

  bool DidAllocate() { return GetData() != nullptr; }
  pointer Allocate(size_type capacity) {
    GetData() = AllocatorTraits::allocate(GetAllocator(), capacity);
    GetCapacity() = capacity;
    return GetData();
  }

  ~AllocationTransaction() {
    if (DidAllocate()) {
      AllocatorTraits::deallocate(GetAllocator(), GetData(), GetCapacity());
    }
  }

 private:
  container_internal::CompressedTuple<AllocatorType, pointer> alloc_data_;
  size_type capacity_ = 0;
};

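// RAII helper that tracks a contiguous range of newly constructed elements
// and destroys them in its destructor unless `Commit()` is called first. This
// lets an operation such as `Storage::Resize()` roll back the elements it has
// constructed if a later step throws.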
template <typename AllocatorType>
class ConstructionTransaction {
  using pointer = typename AllocatorType::pointer;
  using size_type = typename AllocatorType::size_type;

 public:
  explicit ConstructionTransaction(AllocatorType* alloc_ptr)
      : alloc_data_(*alloc_ptr, nullptr) {}

  ConstructionTransaction(const ConstructionTransaction&) = delete;
  void operator=(const ConstructionTransaction&) = delete;

  template <typename ValueAdapter>
  void Construct(pointer data, ValueAdapter* values_ptr, size_type size) {
    inlined_vector_internal::ConstructElements(std::addressof(GetAllocator()),
                                               data, values_ptr, size);
    GetData() = data;
    GetSize() = size;
  }
  void Commit() {
    GetData() = nullptr;
    GetSize() = 0;
  }

  ~ConstructionTransaction() {
    if (GetData() != nullptr) {
      inlined_vector_internal::DestroyElements(std::addressof(GetAllocator()),
                                               GetData(), GetSize());
    }
  }

 private:
  AllocatorType& GetAllocator() { return alloc_data_.template get<0>(); }
  pointer& GetData() { return alloc_data_.template get<1>(); }
  size_type& GetSize() { return size_; }

  container_internal::CompressedTuple<AllocatorType, pointer> alloc_data_;
  size_type size_ = 0;
};

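// The storage layer of `InlinedVector`: elements live either in an inline
// buffer with room for `N` objects or in a heap allocation obtained from `A`.
// `metadata_` packs the allocator together with a single word holding
// `(size << 1) | is_allocated`. For example, a heap-allocated storage holding
// 3 elements stores 7 (`0b111`), while an inlined storage holding 3 elements
// stores 6 (`0b110`).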
template <typename T, size_t N, typename A>
class Storage {
 public:
  using allocator_type = A;
  using value_type = typename allocator_type::value_type;
  using pointer = typename allocator_type::pointer;
  using const_pointer = typename allocator_type::const_pointer;
  using reference = typename allocator_type::reference;
  using const_reference = typename allocator_type::const_reference;
  using rvalue_reference = typename allocator_type::value_type&&;
  using size_type = typename allocator_type::size_type;
  using difference_type = typename allocator_type::difference_type;
  using iterator = pointer;
  using const_iterator = const_pointer;
  using reverse_iterator = std::reverse_iterator<iterator>;
  using const_reverse_iterator = std::reverse_iterator<const_iterator>;
  using MoveIterator = std::move_iterator<iterator>;
  using AllocatorTraits = absl::allocator_traits<allocator_type>;
  using IsMemcpyOk = inlined_vector_internal::IsMemcpyOk<allocator_type>;

  using StorageView = inlined_vector_internal::StorageView<allocator_type>;

  template <typename Iterator>
  using IteratorValueAdapter =
      inlined_vector_internal::IteratorValueAdapter<allocator_type, Iterator>;
  using CopyValueAdapter =
      inlined_vector_internal::CopyValueAdapter<allocator_type>;
  using DefaultValueAdapter =
      inlined_vector_internal::DefaultValueAdapter<allocator_type>;

  using AllocationTransaction =
      inlined_vector_internal::AllocationTransaction<allocator_type>;
  using ConstructionTransaction =
      inlined_vector_internal::ConstructionTransaction<allocator_type>;

  Storage() : metadata_() {}

  explicit Storage(const allocator_type& alloc)
      : metadata_(alloc, /* empty and inlined */ 0) {}

  ~Storage() {
    pointer data = GetIsAllocated() ? GetAllocatedData() : GetInlinedData();
    inlined_vector_internal::DestroyElements(GetAllocPtr(), data, GetSize());
    DeallocateIfAllocated();
  }

  size_type GetSize() const { return GetSizeAndIsAllocated() >> 1; }

  bool GetIsAllocated() const { return GetSizeAndIsAllocated() & 1; }

  pointer GetInlinedData() {
    return reinterpret_cast<pointer>(
        std::addressof(data_.inlined.inlined_data[0]));
  }

  const_pointer GetInlinedData() const {
    return reinterpret_cast<const_pointer>(
        std::addressof(data_.inlined.inlined_data[0]));
  }

  pointer GetAllocatedData() { return data_.allocated.allocated_data; }

  const_pointer GetAllocatedData() const {
    return data_.allocated.allocated_data;
  }

  size_type GetAllocatedCapacity() const {
    return data_.allocated.allocated_capacity;
  }

  StorageView MakeStorageView() {
    return GetIsAllocated() ? StorageView{GetAllocatedData(), GetSize(),
                                          GetAllocatedCapacity()}
                            : StorageView{GetInlinedData(), GetSize(),
                                          static_cast<size_type>(N)};
  }

  allocator_type* GetAllocPtr() {
    return std::addressof(metadata_.template get<0>());
  }

  const allocator_type* GetAllocPtr() const {
    return std::addressof(metadata_.template get<0>());
  }

  void SetIsAllocated() { GetSizeAndIsAllocated() |= 1; }

  void UnsetIsAllocated() {
    // Set the bit first so that the subtraction below always clears it (and
    // leaves the size bits untouched) regardless of its previous value.
    SetIsAllocated();
    GetSizeAndIsAllocated() -= 1;
  }

  void SetAllocatedSize(size_type size) {
    GetSizeAndIsAllocated() = (size << 1) | static_cast<size_type>(1);
  }

  void SetInlinedSize(size_type size) { GetSizeAndIsAllocated() = size << 1; }

  void SetSize(size_type size) {
    GetSizeAndIsAllocated() =
        (size << 1) | static_cast<size_type>(GetIsAllocated());
  }

  void AddSize(size_type count) { GetSizeAndIsAllocated() += count << 1; }

  void SubtractSize(size_type count) {
    assert(count <= GetSize());
    GetSizeAndIsAllocated() -= count << 1;
  }

  void SetAllocatedData(pointer data, size_type capacity) {
    data_.allocated.allocated_data = data;
    data_.allocated.allocated_capacity = capacity;
  }

  void DeallocateIfAllocated() {
    if (GetIsAllocated()) {
      AllocatorTraits::deallocate(*GetAllocPtr(), GetAllocatedData(),
                                  GetAllocatedCapacity());
    }
  }

  void AcquireAllocation(AllocationTransaction* allocation_tx_ptr) {
    SetAllocatedData(allocation_tx_ptr->GetData(),
                     allocation_tx_ptr->GetCapacity());
    allocation_tx_ptr->GetData() = nullptr;
    allocation_tx_ptr->GetCapacity() = 0;
  }

  void SwapSizeAndIsAllocated(Storage* other) {
    using std::swap;
    swap(GetSizeAndIsAllocated(), other->GetSizeAndIsAllocated());
  }

  void SwapAllocatedSizeAndCapacity(Storage* other) {
    using std::swap;
    swap(data_.allocated, other->data_.allocated);
  }

  void MemcpyFrom(const Storage& other_storage) {
    assert(IsMemcpyOk::value || other_storage.GetIsAllocated());

    GetSizeAndIsAllocated() = other_storage.GetSizeAndIsAllocated();
    data_ = other_storage.data_;
  }

  template <typename ValueAdapter>
  void Initialize(ValueAdapter values, size_type new_size);

  template <typename ValueAdapter>
  void Assign(ValueAdapter values, size_type new_size);

  template <typename ValueAdapter>
  void Resize(ValueAdapter values, size_type new_size);

  void Reserve(size_type requested_capacity);

  void ShrinkToFit();

 private:
  size_type& GetSizeAndIsAllocated() { return metadata_.template get<1>(); }

  const size_type& GetSizeAndIsAllocated() const {
    return metadata_.template get<1>();
  }

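  // Doubles `current_capacity` until it is at least `requested_capacity`.
  // Note: this assumes a nonzero `current_capacity`; otherwise the loop in
  // the implementation would never terminate.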
  static size_type LegacyNextCapacityFrom(size_type current_capacity,
                                          size_type requested_capacity) {
    // TODO(johnsoncj): Get rid of this old behavior.
    size_type new_capacity = current_capacity;
    while (new_capacity < requested_capacity) {
      new_capacity *= 2;
    }
    return new_capacity;
  }

  using Metadata =
      container_internal::CompressedTuple<allocator_type, size_type>;

  struct Allocated {
    pointer allocated_data;
    size_type allocated_capacity;
  };

  struct Inlined {
    using InlinedDataElement =
        absl::aligned_storage_t<sizeof(value_type), alignof(value_type)>;
    InlinedDataElement inlined_data[N];
  };

  union Data {
    Allocated allocated;
    Inlined inlined;
  };

  Metadata metadata_;
  Data data_;
};

template <typename T, size_t N, typename A>
template <typename ValueAdapter>
auto Storage<T, N, A>::Initialize(ValueAdapter values, size_type new_size)
    -> void {
  // Only callable from constructors!
  assert(!GetIsAllocated());
  assert(GetSize() == 0);

  pointer construct_data;

  if (new_size > static_cast<size_type>(N)) {
    // Because this is only called from the `InlinedVector` constructors, the
    // size is still `0`, so it is safe to record the allocation immediately:
    // if `ConstructElements(...)` throws, `~Storage()` finds no elements to
    // destroy and simply deallocates the memory.
    construct_data = AllocatorTraits::allocate(*GetAllocPtr(), new_size);
    SetAllocatedData(construct_data, new_size);
    SetIsAllocated();
  } else {
    construct_data = GetInlinedData();
  }

  inlined_vector_internal::ConstructElements(GetAllocPtr(), construct_data,
                                             &values, new_size);

  // Since the initial size was guaranteed to be `0` and the allocated bit is
  // already correct for either case, *adding* `new_size` gives us the correct
  // result faster than setting it directly.
  AddSize(new_size);
}

template <typename T, size_t N, typename A>
template <typename ValueAdapter>
auto Storage<T, N, A>::Assign(ValueAdapter values, size_type new_size) -> void {
  StorageView storage_view = MakeStorageView();

  AllocationTransaction allocation_tx(GetAllocPtr());

  absl::Span<value_type> assign_loop;
  absl::Span<value_type> construct_loop;
  absl::Span<value_type> destroy_loop;

  if (new_size > storage_view.capacity) {
    construct_loop = {allocation_tx.Allocate(new_size), new_size};
    destroy_loop = {storage_view.data, storage_view.size};
  } else if (new_size > storage_view.size) {
    assign_loop = {storage_view.data, storage_view.size};
    construct_loop = {storage_view.data + storage_view.size,
                      new_size - storage_view.size};
  } else {
    assign_loop = {storage_view.data, new_size};
    destroy_loop = {storage_view.data + new_size, storage_view.size - new_size};
  }

  inlined_vector_internal::AssignElements(assign_loop.data(), &values,
                                          assign_loop.size());

  inlined_vector_internal::ConstructElements(
      GetAllocPtr(), construct_loop.data(), &values, construct_loop.size());

  inlined_vector_internal::DestroyElements(GetAllocPtr(), destroy_loop.data(),
                                           destroy_loop.size());

  if (allocation_tx.DidAllocate()) {
    DeallocateIfAllocated();
    AcquireAllocation(&allocation_tx);
    SetIsAllocated();
  }

  SetSize(new_size);
}

template <typename T, size_t N, typename A>
template <typename ValueAdapter>
auto Storage<T, N, A>::Resize(ValueAdapter values, size_type new_size) -> void {
  StorageView storage_view = MakeStorageView();

  AllocationTransaction allocation_tx(GetAllocPtr());
  ConstructionTransaction construction_tx(GetAllocPtr());

  IteratorValueAdapter<MoveIterator> move_values(
      MoveIterator(storage_view.data));

  absl::Span<value_type> construct_loop;
  absl::Span<value_type> move_construct_loop;
  absl::Span<value_type> destroy_loop;

  if (new_size > storage_view.capacity) {
    pointer new_data = allocation_tx.Allocate(
        LegacyNextCapacityFrom(storage_view.capacity, new_size));

    // Construct new objects in `new_data`
    construct_loop = {new_data + storage_view.size,
                      new_size - storage_view.size};

    // Move all existing objects into `new_data`
    move_construct_loop = {new_data, storage_view.size};

    // Destroy all existing objects in `storage_view.data`
    destroy_loop = {storage_view.data, storage_view.size};
  } else if (new_size > storage_view.size) {
    // Construct new objects in `storage_view.data`
    construct_loop = {storage_view.data + storage_view.size,
                      new_size - storage_view.size};
  } else {
    // Destroy end `storage_view.size - new_size` objects in `storage_view.data`
    destroy_loop = {storage_view.data + new_size, storage_view.size - new_size};
  }

  construction_tx.Construct(construct_loop.data(), &values,
                            construct_loop.size());

  inlined_vector_internal::ConstructElements(
      GetAllocPtr(), move_construct_loop.data(), &move_values,
      move_construct_loop.size());

  inlined_vector_internal::DestroyElements(GetAllocPtr(), destroy_loop.data(),
                                           destroy_loop.size());

  construction_tx.Commit();
  if (allocation_tx.DidAllocate()) {
    DeallocateIfAllocated();
    AcquireAllocation(&allocation_tx);
    SetIsAllocated();
  }

  SetSize(new_size);
}

template <typename T, size_t N, typename A>
auto Storage<T, N, A>::Reserve(size_type requested_capacity) -> void {
  StorageView storage_view = MakeStorageView();

  if (ABSL_PREDICT_FALSE(requested_capacity <= storage_view.capacity)) return;

  AllocationTransaction allocation_tx(GetAllocPtr());

  IteratorValueAdapter<MoveIterator> move_values(
      MoveIterator(storage_view.data));

  pointer new_data = allocation_tx.Allocate(
      LegacyNextCapacityFrom(storage_view.capacity, requested_capacity));

  inlined_vector_internal::ConstructElements(GetAllocPtr(), new_data,
                                             &move_values, storage_view.size);

  inlined_vector_internal::DestroyElements(GetAllocPtr(), storage_view.data,
                                           storage_view.size);

  DeallocateIfAllocated();
  AcquireAllocation(&allocation_tx);
  SetIsAllocated();
}

template <typename T, size_t N, typename A>
auto Storage<T, N, A>::ShrinkToFit() -> void {
  // May only be called on allocated instances!
  assert(GetIsAllocated());

  StorageView storage_view = {GetAllocatedData(), GetSize(),
                              GetAllocatedCapacity()};

  AllocationTransaction allocation_tx(GetAllocPtr());

  IteratorValueAdapter<MoveIterator> move_values(
      MoveIterator(storage_view.data));

  pointer construct_data;

  if (storage_view.size <= static_cast<size_type>(N)) {
    construct_data = GetInlinedData();
  } else if (storage_view.size < GetAllocatedCapacity()) {
    construct_data = allocation_tx.Allocate(storage_view.size);
  } else {
    return;
  }

  ABSL_INTERNAL_TRY {
    inlined_vector_internal::ConstructElements(GetAllocPtr(), construct_data,
                                               &move_values, storage_view.size);
  }
  ABSL_INTERNAL_CATCH_ANY {
    // Constructing into the inlined space overwrites the `data_` union, and
    // with it the allocated pointer and capacity; restore them before
    // rethrowing so the existing allocation and its elements remain reachable.
    SetAllocatedData(storage_view.data, storage_view.capacity);
    ABSL_INTERNAL_RETHROW;
  }

  inlined_vector_internal::DestroyElements(GetAllocPtr(), storage_view.data,
                                           storage_view.size);

  AllocatorTraits::deallocate(*GetAllocPtr(), storage_view.data,
                              storage_view.capacity);

  if (allocation_tx.DidAllocate()) {
    AcquireAllocation(&allocation_tx);
  } else {
    UnsetIsAllocated();
  }
}

}  // namespace inlined_vector_internal
}  // namespace absl

#endif  // ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_INTERNAL_H_