1 // Copyright 2019 The Abseil Authors.
3 // Licensed under the Apache License, Version 2.0 (the "License");
4 // you may not use this file except in compliance with the License.
5 // You may obtain a copy of the License at
7 // https://www.apache.org/licenses/LICENSE-2.0
9 // Unless required by applicable law or agreed to in writing, software
10 // distributed under the License is distributed on an "AS IS" BASIS,
11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 // See the License for the specific language governing permissions and
13 // limitations under the License.
15 #ifndef ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_INTERNAL_H_
16 #define ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_INTERNAL_H_
24 #include "absl/base/macros.h"
25 #include "absl/container/internal/compressed_tuple.h"
26 #include "absl/memory/memory.h"
27 #include "absl/meta/type_traits.h"
28 #include "absl/types/span.h"
31 namespace inlined_vector_internal {
// Trait that is true when `Iterator`'s category tag converts to
// `std::forward_iterator_tag`, i.e. the range can be traversed more than once
// (input-only iterators are excluded).
template <typename Iterator>
using IsAtLeastForwardIterator = std::is_convertible<
    typename std::iterator_traits<Iterator>::iterator_category,
    std::forward_iterator_tag>;
38 template <typename AllocatorType>
39 using IsMemcpyOk = absl::conjunction<
40 std::is_same<std::allocator<typename AllocatorType::value_type>,
42 absl::is_trivially_copy_constructible<typename AllocatorType::value_type>,
43 absl::is_trivially_copy_assignable<typename AllocatorType::value_type>,
44 absl::is_trivially_destructible<typename AllocatorType::value_type>>;
46 template <typename AllocatorType, typename ValueType, typename SizeType>
47 void DestroyElements(AllocatorType* alloc_ptr, ValueType* destroy_first,
48 SizeType destroy_size) {
49 using AllocatorTraits = absl::allocator_traits<AllocatorType>;
51 if (destroy_first != nullptr) {
52 for (auto i = destroy_size; i != 0;) {
54 AllocatorTraits::destroy(*alloc_ptr, destroy_first + i);
58 // Overwrite unused memory with `0xab` so we can catch uninitialized usage.
60 // Cast to `void*` to tell the compiler that we don't care that we might be
61 // scribbling on a vtable pointer.
62 auto* memory_ptr = static_cast<void*>(destroy_first);
63 auto memory_size = sizeof(ValueType) * destroy_size;
64 std::memset(memory_ptr, 0xab, memory_size);
69 template <typename AllocatorType, typename ValueType, typename ValueAdapter,
71 void ConstructElements(AllocatorType* alloc_ptr, ValueType* construct_first,
72 ValueAdapter* values_ptr, SizeType construct_size) {
73 // If any construction fails, all completed constructions are rolled back.
74 for (SizeType i = 0; i < construct_size; ++i) {
76 values_ptr->ConstructNext(alloc_ptr, construct_first + i);
78 ABSL_INTERNAL_CATCH_ANY {
79 inlined_vector_internal::DestroyElements(alloc_ptr, construct_first, i);
81 ABSL_INTERNAL_RETHROW;
// Assigns `assign_size` values from `*values_ptr` (an adapter exposing
// `AssignNext`) onto the already-constructed elements starting at
// `assign_first`. A zero `assign_size` is a no-op.
template <typename ValueType, typename ValueAdapter, typename SizeType>
void AssignElements(ValueType* assign_first, ValueAdapter* values_ptr,
                    SizeType assign_size) {
  for (SizeType i = 0; i < assign_size; ++i) {
    values_ptr->AssignNext(assign_first + i);
  }
}
// A non-owning snapshot of a `Storage` instance's element memory: the data
// pointer plus the current size and capacity, passed around as one unit.
// It never owns or frees `data`.
template <typename AllocatorType>
struct StorageView {
  using pointer = typename AllocatorType::pointer;
  using size_type = typename AllocatorType::size_type;

  pointer data;
  size_type size;
  size_type capacity;
};
104 template <typename AllocatorType, typename Iterator>
105 class IteratorValueAdapter {
106 using pointer = typename AllocatorType::pointer;
107 using AllocatorTraits = absl::allocator_traits<AllocatorType>;
110 explicit IteratorValueAdapter(const Iterator& it) : it_(it) {}
112 void ConstructNext(AllocatorType* alloc_ptr, pointer construct_at) {
113 AllocatorTraits::construct(*alloc_ptr, construct_at, *it_);
117 void AssignNext(pointer assign_at) {
126 template <typename AllocatorType>
127 class CopyValueAdapter {
128 using pointer = typename AllocatorType::pointer;
129 using const_pointer = typename AllocatorType::const_pointer;
130 using const_reference = typename AllocatorType::const_reference;
131 using AllocatorTraits = absl::allocator_traits<AllocatorType>;
134 explicit CopyValueAdapter(const_reference v) : ptr_(std::addressof(v)) {}
136 void ConstructNext(AllocatorType* alloc_ptr, pointer construct_at) {
137 AllocatorTraits::construct(*alloc_ptr, construct_at, *ptr_);
140 void AssignNext(pointer assign_at) { *assign_at = *ptr_; }
146 template <typename AllocatorType>
147 class DefaultValueAdapter {
148 using pointer = typename AllocatorType::pointer;
149 using value_type = typename AllocatorType::value_type;
150 using AllocatorTraits = absl::allocator_traits<AllocatorType>;
153 explicit DefaultValueAdapter() {}
155 void ConstructNext(AllocatorType* alloc_ptr, pointer construct_at) {
156 AllocatorTraits::construct(*alloc_ptr, construct_at);
159 void AssignNext(pointer assign_at) { *assign_at = value_type(); }
162 template <typename AllocatorType>
163 class AllocationTransaction {
164 using value_type = typename AllocatorType::value_type;
165 using pointer = typename AllocatorType::pointer;
166 using size_type = typename AllocatorType::size_type;
167 using AllocatorTraits = absl::allocator_traits<AllocatorType>;
170 explicit AllocationTransaction(AllocatorType* alloc_ptr)
171 : alloc_data_(*alloc_ptr, nullptr) {}
173 AllocationTransaction(const AllocationTransaction&) = delete;
174 void operator=(const AllocationTransaction&) = delete;
176 AllocatorType& GetAllocator() { return alloc_data_.template get<0>(); }
177 pointer& GetData() { return alloc_data_.template get<1>(); }
178 size_type& GetCapacity() { return capacity_; }
180 bool DidAllocate() { return GetData() != nullptr; }
181 pointer Allocate(size_type capacity) {
182 GetData() = AllocatorTraits::allocate(GetAllocator(), capacity);
183 GetCapacity() = capacity;
187 ~AllocationTransaction() {
189 AllocatorTraits::deallocate(GetAllocator(), GetData(), GetCapacity());
194 container_internal::CompressedTuple<AllocatorType, pointer> alloc_data_;
195 size_type capacity_ = 0;
198 template <typename AllocatorType>
199 class ConstructionTransaction {
200 using pointer = typename AllocatorType::pointer;
201 using size_type = typename AllocatorType::size_type;
204 explicit ConstructionTransaction(AllocatorType* alloc_ptr)
205 : alloc_data_(*alloc_ptr, nullptr) {}
207 ConstructionTransaction(const ConstructionTransaction&) = delete;
208 void operator=(const ConstructionTransaction&) = delete;
210 template <typename ValueAdapter>
211 void Construct(pointer data, ValueAdapter* values_ptr, size_type size) {
212 inlined_vector_internal::ConstructElements(std::addressof(GetAllocator()),
213 data, values_ptr, size);
222 ~ConstructionTransaction() {
223 if (GetData() != nullptr) {
224 inlined_vector_internal::DestroyElements(std::addressof(GetAllocator()),
225 GetData(), GetSize());
230 AllocatorType& GetAllocator() { return alloc_data_.template get<0>(); }
231 pointer& GetData() { return alloc_data_.template get<1>(); }
232 size_type& GetSize() { return size_; }
234 container_internal::CompressedTuple<AllocatorType, pointer> alloc_data_;
238 template <typename T, size_t N, typename A>
241 using allocator_type = A;
242 using value_type = typename allocator_type::value_type;
243 using pointer = typename allocator_type::pointer;
244 using const_pointer = typename allocator_type::const_pointer;
245 using reference = typename allocator_type::reference;
246 using const_reference = typename allocator_type::const_reference;
247 using rvalue_reference = typename allocator_type::value_type&&;
248 using size_type = typename allocator_type::size_type;
249 using difference_type = typename allocator_type::difference_type;
250 using iterator = pointer;
251 using const_iterator = const_pointer;
252 using reverse_iterator = std::reverse_iterator<iterator>;
253 using const_reverse_iterator = std::reverse_iterator<const_iterator>;
254 using MoveIterator = std::move_iterator<iterator>;
255 using AllocatorTraits = absl::allocator_traits<allocator_type>;
256 using IsMemcpyOk = inlined_vector_internal::IsMemcpyOk<allocator_type>;
258 using StorageView = inlined_vector_internal::StorageView<allocator_type>;
260 template <typename Iterator>
261 using IteratorValueAdapter =
262 inlined_vector_internal::IteratorValueAdapter<allocator_type, Iterator>;
263 using CopyValueAdapter =
264 inlined_vector_internal::CopyValueAdapter<allocator_type>;
265 using DefaultValueAdapter =
266 inlined_vector_internal::DefaultValueAdapter<allocator_type>;
268 using AllocationTransaction =
269 inlined_vector_internal::AllocationTransaction<allocator_type>;
270 using ConstructionTransaction =
271 inlined_vector_internal::ConstructionTransaction<allocator_type>;
273 Storage() : metadata_() {}
275 explicit Storage(const allocator_type& alloc)
276 : metadata_(alloc, /* empty and inlined */ 0) {}
279 pointer data = GetIsAllocated() ? GetAllocatedData() : GetInlinedData();
280 inlined_vector_internal::DestroyElements(GetAllocPtr(), data, GetSize());
281 DeallocateIfAllocated();
284 size_type GetSize() const { return GetSizeAndIsAllocated() >> 1; }
286 bool GetIsAllocated() const { return GetSizeAndIsAllocated() & 1; }
288 pointer GetInlinedData() {
289 return reinterpret_cast<pointer>(
290 std::addressof(data_.inlined.inlined_data[0]));
293 const_pointer GetInlinedData() const {
294 return reinterpret_cast<const_pointer>(
295 std::addressof(data_.inlined.inlined_data[0]));
298 pointer GetAllocatedData() { return data_.allocated.allocated_data; }
300 const_pointer GetAllocatedData() const {
301 return data_.allocated.allocated_data;
304 size_type GetAllocatedCapacity() const {
305 return data_.allocated.allocated_capacity;
308 StorageView MakeStorageView() {
309 return GetIsAllocated() ? StorageView{GetAllocatedData(), GetSize(),
310 GetAllocatedCapacity()}
311 : StorageView{GetInlinedData(), GetSize(),
312 static_cast<size_type>(N)};
315 allocator_type* GetAllocPtr() {
316 return std::addressof(metadata_.template get<0>());
319 const allocator_type* GetAllocPtr() const {
320 return std::addressof(metadata_.template get<0>());
323 void SetIsAllocated() { GetSizeAndIsAllocated() |= 1; }
325 void UnsetIsAllocated() {
327 GetSizeAndIsAllocated() -= 1;
330 void SetAllocatedSize(size_type size) {
331 GetSizeAndIsAllocated() = (size << 1) | static_cast<size_type>(1);
334 void SetInlinedSize(size_type size) { GetSizeAndIsAllocated() = size << 1; }
336 void SetSize(size_type size) {
337 GetSizeAndIsAllocated() =
338 (size << 1) | static_cast<size_type>(GetIsAllocated());
341 void AddSize(size_type count) { GetSizeAndIsAllocated() += count << 1; }
343 void SubtractSize(size_type count) {
344 assert(count <= GetSize());
345 GetSizeAndIsAllocated() -= count << 1;
348 void SetAllocatedData(pointer data, size_type capacity) {
349 data_.allocated.allocated_data = data;
350 data_.allocated.allocated_capacity = capacity;
353 void DeallocateIfAllocated() {
354 if (GetIsAllocated()) {
355 AllocatorTraits::deallocate(*GetAllocPtr(), GetAllocatedData(),
356 GetAllocatedCapacity());
360 void AcquireAllocation(AllocationTransaction* allocation_tx_ptr) {
361 SetAllocatedData(allocation_tx_ptr->GetData(),
362 allocation_tx_ptr->GetCapacity());
363 allocation_tx_ptr->GetData() = nullptr;
364 allocation_tx_ptr->GetCapacity() = 0;
367 void MemcpyFrom(const Storage& other_storage) {
368 assert(IsMemcpyOk::value || other_storage.GetIsAllocated());
370 GetSizeAndIsAllocated() = other_storage.GetSizeAndIsAllocated();
371 data_ = other_storage.data_;
374 template <typename ValueAdapter>
375 void Initialize(ValueAdapter values, size_type new_size);
377 template <typename ValueAdapter>
378 void Assign(ValueAdapter values, size_type new_size);
380 template <typename ValueAdapter>
381 void Resize(ValueAdapter values, size_type new_size);
383 template <typename... Args>
384 reference EmplaceBack(Args&&... args);
386 iterator Erase(const_iterator from, const_iterator to);
388 void Reserve(size_type requested_capacity);
392 void Swap(Storage* other_storage_ptr);
395 size_type& GetSizeAndIsAllocated() { return metadata_.template get<1>(); }
397 const size_type& GetSizeAndIsAllocated() const {
398 return metadata_.template get<1>();
401 static size_type NextCapacityFrom(size_type current_capacity) {
402 return current_capacity * 2;
405 static size_type LegacyNextCapacityFrom(size_type current_capacity,
406 size_type requested_capacity) {
407 // TODO(johnsoncj): Get rid of this old behavior.
408 size_type new_capacity = current_capacity;
409 while (new_capacity < requested_capacity) {
416 container_internal::CompressedTuple<allocator_type, size_type>;
419 pointer allocated_data;
420 size_type allocated_capacity;
424 using InlinedDataElement =
425 absl::aligned_storage_t<sizeof(value_type), alignof(value_type)>;
426 InlinedDataElement inlined_data[N];
438 template <typename T, size_t N, typename A>
439 template <typename ValueAdapter>
440 auto Storage<T, N, A>::Initialize(ValueAdapter values, size_type new_size)
442 // Only callable from constructors!
443 assert(!GetIsAllocated());
444 assert(GetSize() == 0);
446 pointer construct_data;
448 if (new_size > static_cast<size_type>(N)) {
449 // Because this is only called from the `InlinedVector` constructors, it's
450 // safe to take on the allocation with size `0`. If `ConstructElements(...)`
451 // throws, deallocation will be automatically handled by `~Storage()`.
452 construct_data = AllocatorTraits::allocate(*GetAllocPtr(), new_size);
453 SetAllocatedData(construct_data, new_size);
456 construct_data = GetInlinedData();
459 inlined_vector_internal::ConstructElements(GetAllocPtr(), construct_data,
462 // Since the initial size was guaranteed to be `0` and the allocated bit is
463 // already correct for either case, *adding* `new_size` gives us the correct
464 // result faster than setting it directly.
468 template <typename T, size_t N, typename A>
469 template <typename ValueAdapter>
470 auto Storage<T, N, A>::Assign(ValueAdapter values, size_type new_size) -> void {
471 StorageView storage_view = MakeStorageView();
473 AllocationTransaction allocation_tx(GetAllocPtr());
475 absl::Span<value_type> assign_loop;
476 absl::Span<value_type> construct_loop;
477 absl::Span<value_type> destroy_loop;
479 if (new_size > storage_view.capacity) {
480 construct_loop = {allocation_tx.Allocate(new_size), new_size};
481 destroy_loop = {storage_view.data, storage_view.size};
482 } else if (new_size > storage_view.size) {
483 assign_loop = {storage_view.data, storage_view.size};
484 construct_loop = {storage_view.data + storage_view.size,
485 new_size - storage_view.size};
487 assign_loop = {storage_view.data, new_size};
488 destroy_loop = {storage_view.data + new_size, storage_view.size - new_size};
491 inlined_vector_internal::AssignElements(assign_loop.data(), &values,
494 inlined_vector_internal::ConstructElements(
495 GetAllocPtr(), construct_loop.data(), &values, construct_loop.size());
497 inlined_vector_internal::DestroyElements(GetAllocPtr(), destroy_loop.data(),
498 destroy_loop.size());
500 if (allocation_tx.DidAllocate()) {
501 DeallocateIfAllocated();
502 AcquireAllocation(&allocation_tx);
509 template <typename T, size_t N, typename A>
510 template <typename ValueAdapter>
511 auto Storage<T, N, A>::Resize(ValueAdapter values, size_type new_size) -> void {
512 StorageView storage_view = MakeStorageView();
514 AllocationTransaction allocation_tx(GetAllocPtr());
515 ConstructionTransaction construction_tx(GetAllocPtr());
517 IteratorValueAdapter<MoveIterator> move_values(
518 MoveIterator(storage_view.data));
520 absl::Span<value_type> construct_loop;
521 absl::Span<value_type> move_construct_loop;
522 absl::Span<value_type> destroy_loop;
524 if (new_size > storage_view.capacity) {
525 pointer new_data = allocation_tx.Allocate(
526 LegacyNextCapacityFrom(storage_view.capacity, new_size));
528 // Construct new objects in `new_data`
529 construct_loop = {new_data + storage_view.size,
530 new_size - storage_view.size};
532 // Move all existing objects into `new_data`
533 move_construct_loop = {new_data, storage_view.size};
535 // Destroy all existing objects in `storage_view.data`
536 destroy_loop = {storage_view.data, storage_view.size};
537 } else if (new_size > storage_view.size) {
538 // Construct new objects in `storage_view.data`
539 construct_loop = {storage_view.data + storage_view.size,
540 new_size - storage_view.size};
542 // Destroy end `storage_view.size - new_size` objects in `storage_view.data`
543 destroy_loop = {storage_view.data + new_size, storage_view.size - new_size};
546 construction_tx.Construct(construct_loop.data(), &values,
547 construct_loop.size());
549 inlined_vector_internal::ConstructElements(
550 GetAllocPtr(), move_construct_loop.data(), &move_values,
551 move_construct_loop.size());
553 inlined_vector_internal::DestroyElements(GetAllocPtr(), destroy_loop.data(),
554 destroy_loop.size());
556 construction_tx.Commit();
557 if (allocation_tx.DidAllocate()) {
558 DeallocateIfAllocated();
559 AcquireAllocation(&allocation_tx);
566 template <typename T, size_t N, typename A>
567 template <typename... Args>
568 auto Storage<T, N, A>::EmplaceBack(Args&&... args) -> reference {
569 StorageView storage_view = MakeStorageView();
571 AllocationTransaction allocation_tx(GetAllocPtr());
573 IteratorValueAdapter<MoveIterator> move_values(
574 MoveIterator(storage_view.data));
576 pointer construct_data =
577 (storage_view.size == storage_view.capacity
578 ? allocation_tx.Allocate(NextCapacityFrom(storage_view.capacity))
579 : storage_view.data);
581 pointer last_ptr = construct_data + storage_view.size;
582 AllocatorTraits::construct(*GetAllocPtr(), last_ptr,
583 std::forward<Args>(args)...);
585 if (allocation_tx.DidAllocate()) {
587 inlined_vector_internal::ConstructElements(
588 GetAllocPtr(), allocation_tx.GetData(), &move_values,
591 ABSL_INTERNAL_CATCH_ANY {
592 AllocatorTraits::destroy(*GetAllocPtr(), last_ptr);
593 ABSL_INTERNAL_RETHROW;
596 inlined_vector_internal::DestroyElements(GetAllocPtr(), storage_view.data,
599 DeallocateIfAllocated();
600 AcquireAllocation(&allocation_tx);
608 template <typename T, size_t N, typename A>
609 auto Storage<T, N, A>::Erase(const_iterator from, const_iterator to)
613 StorageView storage_view = MakeStorageView();
615 size_type erase_size = std::distance(from, to);
616 size_type erase_index =
617 std::distance(const_iterator(storage_view.data), from);
618 size_type erase_end_index = erase_index + erase_size;
620 IteratorValueAdapter<MoveIterator> move_values(
621 MoveIterator(storage_view.data + erase_end_index));
623 inlined_vector_internal::AssignElements(storage_view.data + erase_index,
625 storage_view.size - erase_end_index);
627 inlined_vector_internal::DestroyElements(
628 GetAllocPtr(), storage_view.data + (storage_view.size - erase_size),
631 SubtractSize(erase_size);
632 return iterator(storage_view.data + erase_index);
635 template <typename T, size_t N, typename A>
636 auto Storage<T, N, A>::Reserve(size_type requested_capacity) -> void {
637 StorageView storage_view = MakeStorageView();
639 if (ABSL_PREDICT_FALSE(requested_capacity <= storage_view.capacity)) return;
641 AllocationTransaction allocation_tx(GetAllocPtr());
643 IteratorValueAdapter<MoveIterator> move_values(
644 MoveIterator(storage_view.data));
646 pointer new_data = allocation_tx.Allocate(
647 LegacyNextCapacityFrom(storage_view.capacity, requested_capacity));
649 inlined_vector_internal::ConstructElements(GetAllocPtr(), new_data,
650 &move_values, storage_view.size);
652 inlined_vector_internal::DestroyElements(GetAllocPtr(), storage_view.data,
655 DeallocateIfAllocated();
656 AcquireAllocation(&allocation_tx);
660 template <typename T, size_t N, typename A>
661 auto Storage<T, N, A>::ShrinkToFit() -> void {
662 // May only be called on allocated instances!
663 assert(GetIsAllocated());
665 StorageView storage_view{GetAllocatedData(), GetSize(),
666 GetAllocatedCapacity()};
668 AllocationTransaction allocation_tx(GetAllocPtr());
670 IteratorValueAdapter<MoveIterator> move_values(
671 MoveIterator(storage_view.data));
673 pointer construct_data;
675 if (storage_view.size <= static_cast<size_type>(N)) {
676 construct_data = GetInlinedData();
677 } else if (storage_view.size < GetAllocatedCapacity()) {
678 construct_data = allocation_tx.Allocate(storage_view.size);
684 inlined_vector_internal::ConstructElements(GetAllocPtr(), construct_data,
685 &move_values, storage_view.size);
687 ABSL_INTERNAL_CATCH_ANY {
688 // Writing to inlined data will trample on the existing state, thus it needs
689 // to be restored when a construction fails.
690 SetAllocatedData(storage_view.data, storage_view.capacity);
691 ABSL_INTERNAL_RETHROW;
694 inlined_vector_internal::DestroyElements(GetAllocPtr(), storage_view.data,
697 AllocatorTraits::deallocate(*GetAllocPtr(), storage_view.data,
698 storage_view.capacity);
700 if (allocation_tx.DidAllocate()) {
701 AcquireAllocation(&allocation_tx);
707 template <typename T, size_t N, typename A>
708 auto Storage<T, N, A>::Swap(Storage* other_storage_ptr) -> void {
710 assert(this != other_storage_ptr);
712 if (GetIsAllocated() && other_storage_ptr->GetIsAllocated()) {
713 // Both are allocated, thus we can swap the allocations at the top level.
715 swap(data_.allocated, other_storage_ptr->data_.allocated);
716 } else if (!GetIsAllocated() && !other_storage_ptr->GetIsAllocated()) {
717 // Both are inlined, thus element-wise swap up to smaller size, then move
718 // the remaining elements.
720 Storage* small_ptr = this;
721 Storage* large_ptr = other_storage_ptr;
722 if (small_ptr->GetSize() > large_ptr->GetSize()) swap(small_ptr, large_ptr);
724 for (size_type i = 0; i < small_ptr->GetSize(); ++i) {
725 swap(small_ptr->GetInlinedData()[i], large_ptr->GetInlinedData()[i]);
728 IteratorValueAdapter<MoveIterator> move_values(
729 MoveIterator(large_ptr->GetInlinedData() + small_ptr->GetSize()));
731 inlined_vector_internal::ConstructElements(
732 large_ptr->GetAllocPtr(),
733 small_ptr->GetInlinedData() + small_ptr->GetSize(), &move_values,
734 large_ptr->GetSize() - small_ptr->GetSize());
736 inlined_vector_internal::DestroyElements(
737 large_ptr->GetAllocPtr(),
738 large_ptr->GetInlinedData() + small_ptr->GetSize(),
739 large_ptr->GetSize() - small_ptr->GetSize());
741 // One is allocated and the other is inlined, thus we first move the
742 // elements from the inlined instance to the inlined space in the allocated
743 // instance and then we can finish by having the other vector take on the
746 Storage* allocated_ptr = this;
747 Storage* inlined_ptr = other_storage_ptr;
748 if (!allocated_ptr->GetIsAllocated()) swap(allocated_ptr, inlined_ptr);
750 StorageView allocated_storage_view{allocated_ptr->GetAllocatedData(),
751 allocated_ptr->GetSize(),
752 allocated_ptr->GetAllocatedCapacity()};
754 IteratorValueAdapter<MoveIterator> move_values(
755 MoveIterator(inlined_ptr->GetInlinedData()));
758 inlined_vector_internal::ConstructElements(
759 inlined_ptr->GetAllocPtr(), allocated_ptr->GetInlinedData(),
760 &move_values, inlined_ptr->GetSize());
762 ABSL_INTERNAL_CATCH_ANY {
763 // Writing to inlined data will trample on the existing state, thus it
764 // needs to be restored when a construction fails.
765 allocated_ptr->SetAllocatedData(allocated_storage_view.data,
766 allocated_storage_view.capacity);
767 ABSL_INTERNAL_RETHROW;
770 inlined_vector_internal::DestroyElements(inlined_ptr->GetAllocPtr(),
771 inlined_ptr->GetInlinedData(),
772 inlined_ptr->GetSize());
774 inlined_ptr->SetAllocatedData(allocated_storage_view.data,
775 allocated_storage_view.capacity);
778 // All cases swap the size, `is_allocated` boolean and the allocator.
779 swap(GetSizeAndIsAllocated(), other_storage_ptr->GetSizeAndIsAllocated());
780 swap(*GetAllocPtr(), *other_storage_ptr->GetAllocPtr());
783 } // namespace inlined_vector_internal
786 #endif // ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_INTERNAL_H_