/src/abseil-cpp/absl/container/internal/inlined_vector.h
Line | Count | Source |
1 | | // Copyright 2019 The Abseil Authors. |
2 | | // |
3 | | // Licensed under the Apache License, Version 2.0 (the "License"); |
4 | | // you may not use this file except in compliance with the License. |
5 | | // You may obtain a copy of the License at |
6 | | // |
7 | | // https://www.apache.org/licenses/LICENSE-2.0 |
8 | | // |
9 | | // Unless required by applicable law or agreed to in writing, software |
10 | | // distributed under the License is distributed on an "AS IS" BASIS, |
11 | | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
12 | | // See the License for the specific language governing permissions and |
13 | | // limitations under the License. |
14 | | |
15 | | #ifndef ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_H_ |
16 | | #define ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_H_ |
17 | | |
18 | | #include <algorithm> |
19 | | #include <cstddef> |
20 | | #include <cstring> |
21 | | #include <iterator> |
22 | | #include <limits> |
23 | | #include <memory> |
24 | | #include <new> |
25 | | #include <type_traits> |
26 | | #include <utility> |
27 | | |
28 | | #include "absl/base/attributes.h" |
29 | | #include "absl/base/macros.h" |
30 | | #include "absl/container/internal/compressed_tuple.h" |
31 | | #include "absl/memory/memory.h" |
32 | | #include "absl/meta/type_traits.h" |
33 | | #include "absl/types/span.h" |
34 | | |
35 | | namespace absl { |
36 | | ABSL_NAMESPACE_BEGIN |
37 | | namespace inlined_vector_internal { |
38 | | |
39 | | // GCC does not deal very well with the below code |
40 | | #if !defined(__clang__) && defined(__GNUC__) |
41 | | #pragma GCC diagnostic push |
42 | | #pragma GCC diagnostic ignored "-Warray-bounds" |
43 | | #endif |
44 | | |
45 | | template <typename A> |
46 | | using AllocatorTraits = std::allocator_traits<A>; |
47 | | template <typename A> |
48 | | using ValueType = typename AllocatorTraits<A>::value_type; |
49 | | template <typename A> |
50 | | using SizeType = typename AllocatorTraits<A>::size_type; |
51 | | template <typename A> |
52 | | using Pointer = typename AllocatorTraits<A>::pointer; |
53 | | template <typename A> |
54 | | using ConstPointer = typename AllocatorTraits<A>::const_pointer; |
55 | | template <typename A> |
56 | | using SizeType = typename AllocatorTraits<A>::size_type; |
57 | | template <typename A> |
58 | | using DifferenceType = typename AllocatorTraits<A>::difference_type; |
59 | | template <typename A> |
60 | | using Reference = ValueType<A>&; |
61 | | template <typename A> |
62 | | using ConstReference = const ValueType<A>&; |
63 | | template <typename A> |
64 | | using Iterator = Pointer<A>; |
65 | | template <typename A> |
66 | | using ConstIterator = ConstPointer<A>; |
67 | | template <typename A> |
68 | | using ReverseIterator = typename std::reverse_iterator<Iterator<A>>; |
69 | | template <typename A> |
70 | | using ConstReverseIterator = typename std::reverse_iterator<ConstIterator<A>>; |
71 | | template <typename A> |
72 | | using MoveIterator = typename std::move_iterator<Iterator<A>>; |
73 | | |
74 | | template <typename Iterator> |
75 | | using IsAtLeastForwardIterator = std::is_convertible< |
76 | | typename std::iterator_traits<Iterator>::iterator_category, |
77 | | std::forward_iterator_tag>; |
78 | | |
79 | | template <typename A> |
80 | | using IsMoveAssignOk = std::is_move_assignable<ValueType<A>>; |
81 | | template <typename A> |
82 | | using IsSwapOk = absl::type_traits_internal::IsSwappable<ValueType<A>>; |
83 | | |
84 | | template <typename T> |
85 | | struct TypeIdentity { |
86 | | using type = T; |
87 | | }; |
88 | | |
89 | | // Used for function arguments in template functions to prevent ADL by forcing |
90 | | // callers to explicitly specify the template parameter. |
91 | | template <typename T> |
92 | | using NoTypeDeduction = typename TypeIdentity<T>::type; |
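The `TypeIdentity`/`NoTypeDeduction` trick places the allocator parameter in a non-deduced context, so callers of functions such as `ConstructElements` must spell out the allocator type explicitly (e.g. `ConstructElements<A>(...)`). A minimal standalone sketch of the same idea, with illustrative names that are not part of this header:

#include <iostream>

template <typename T>
struct Identity { using type = T; };

// `T` cannot be deduced from a `typename Identity<T>::type` parameter, so the
// template argument must be supplied explicitly at the call site.
template <typename T>
void PrintAs(typename Identity<T>::type value) { std::cout << value << "\n"; }

int main() {
  // PrintAs(42);    // would not compile: T is in a non-deduced context
  PrintAs<int>(42);  // OK: explicit template argument
}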
93 | | |
94 | | template <typename A, bool IsTriviallyDestructible = |
95 | | absl::is_trivially_destructible<ValueType<A>>::value> |
96 | | struct DestroyAdapter; |
97 | | |
98 | | template <typename A> |
99 | | struct DestroyAdapter<A, /* IsTriviallyDestructible */ false> { |
100 | | static void DestroyElements(A& allocator, Pointer<A> destroy_first, |
101 | | SizeType<A> destroy_size) { |
102 | | for (SizeType<A> i = destroy_size; i != 0;) { |
103 | | --i; |
104 | | AllocatorTraits<A>::destroy(allocator, destroy_first + i); |
105 | | } |
106 | | } |
107 | | }; |
108 | | |
109 | | template <typename A> |
110 | | struct DestroyAdapter<A, /* IsTriviallyDestructible */ true> { |
111 | | static void DestroyElements(A& allocator, Pointer<A> destroy_first, |
112 | 0 | SizeType<A> destroy_size) { |
113 | 0 | static_cast<void>(allocator); |
114 | 0 | static_cast<void>(destroy_first); |
115 | 0 | static_cast<void>(destroy_size); |
116 | 0 | }
117 | | }; |
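`DestroyAdapter` dispatches on `is_trivially_destructible`: the general specialization destroys elements in reverse order through `allocator_traits`, while the trivial one compiles down to a no-op. A standalone sketch of the same dispatch, using only standard-library names:

#include <cstddef>
#include <memory>
#include <type_traits>

template <typename A, bool kTrivial = std::is_trivially_destructible<
                          typename std::allocator_traits<A>::value_type>::value>
struct DestroySketch {
  static void Destroy(A& a, typename std::allocator_traits<A>::pointer p,
                      std::size_t n) {
    for (std::size_t i = n; i != 0;) {
      --i;
      std::allocator_traits<A>::destroy(a, p + i);  // reverse construction order
    }
  }
};

template <typename A>
struct DestroySketch<A, /*kTrivial=*/true> {
  // Nothing to do for trivially destructible element types.
  static void Destroy(A&, typename std::allocator_traits<A>::pointer,
                      std::size_t) {}
};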
118 | | |
119 | | template <typename A> |
120 | | struct Allocation { |
121 | | Pointer<A> data = nullptr; |
122 | | SizeType<A> capacity = 0; |
123 | | }; |
124 | | |
125 | | template <typename A, |
126 | | bool IsOverAligned = |
127 | | (alignof(ValueType<A>) > ABSL_INTERNAL_DEFAULT_NEW_ALIGNMENT)> |
128 | | struct MallocAdapter { |
129 | 0 | static Allocation<A> Allocate(A& allocator, SizeType<A> requested_capacity) { |
130 | 0 | return {AllocatorTraits<A>::allocate(allocator, requested_capacity), |
131 | 0 | requested_capacity}; |
132 | 0 | }
133 | | |
134 | | static void Deallocate(A& allocator, Pointer<A> pointer, |
135 | 0 | SizeType<A> capacity) { |
136 | 0 | AllocatorTraits<A>::deallocate(allocator, pointer, capacity); |
137 | 0 | }
138 | | }; |
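`MallocAdapter` is a thin wrapper that pairs the pointer returned by `allocator_traits::allocate` with the granted capacity, matching the `Allocation<A>{data, capacity}` struct above. The underlying allocate/construct/destroy/deallocate protocol, shown with `std::allocator` purely as an illustration:

#include <memory>
#include <string>

int main() {
  std::allocator<std::string> alloc;
  using Traits = std::allocator_traits<std::allocator<std::string>>;

  std::string* data = Traits::allocate(alloc, 4);  // raw storage for 4 strings
  Traits::construct(alloc, data, "hello");         // placement-construct one element
  Traits::destroy(alloc, data);                    // destroy it again
  Traits::deallocate(alloc, data, 4);              // return the storage
}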
139 | | |
140 | | template <typename A, typename ValueAdapter> |
141 | | void ConstructElements(NoTypeDeduction<A>& allocator, |
142 | | Pointer<A> construct_first, ValueAdapter& values, |
143 | 0 | SizeType<A> construct_size) { |
144 | 0 | for (SizeType<A> i = 0; i < construct_size; ++i) { |
145 | 0 | ABSL_INTERNAL_TRY { values.ConstructNext(allocator, construct_first + i); } |
146 | 0 | ABSL_INTERNAL_CATCH_ANY { |
147 | 0 | DestroyAdapter<A>::DestroyElements(allocator, construct_first, i); |
148 | 0 | ABSL_INTERNAL_RETHROW; |
149 | 0 | } |
150 | 0 | } |
151 | 0 | }
152 | | |
153 | | template <typename A, typename ValueAdapter> |
154 | | void AssignElements(Pointer<A> assign_first, ValueAdapter& values, |
155 | | SizeType<A> assign_size) { |
156 | | for (SizeType<A> i = 0; i < assign_size; ++i) { |
157 | | values.AssignNext(assign_first + i); |
158 | | } |
159 | | } |
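`ConstructElements` is strongly exception-safe: if constructing element `i` throws, the `i` elements already constructed are destroyed before the exception is rethrown, so no partially initialized range is ever left behind. A standalone sketch of that rollback pattern, with illustrative names that are not the header's API:

#include <cstddef>
#include <memory>

template <typename A, typename Factory>
void ConstructN(A& alloc, typename std::allocator_traits<A>::pointer first,
                std::size_t n, Factory make) {
  std::size_t i = 0;
  try {
    for (; i < n; ++i) {
      std::allocator_traits<A>::construct(alloc, first + i, make(i));
    }
  } catch (...) {
    while (i != 0) {  // roll back the elements that were already built
      --i;
      std::allocator_traits<A>::destroy(alloc, first + i);
    }
    throw;  // then rethrow to the caller
  }
}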
160 | | |
161 | | template <typename A> |
162 | | struct StorageView { |
163 | | Pointer<A> data; |
164 | | SizeType<A> size; |
165 | | SizeType<A> capacity; |
166 | | }; |
167 | | |
168 | | template <typename A, typename Iterator> |
169 | | class IteratorValueAdapter { |
170 | | public: |
171 | 0 | explicit IteratorValueAdapter(const Iterator& it) : it_(it) {}
172 | | |
173 | 0 | void ConstructNext(A& allocator, Pointer<A> construct_at) { |
174 | 0 | AllocatorTraits<A>::construct(allocator, construct_at, *it_); |
175 | 0 | ++it_; |
176 | 0 | }
177 | | |
178 | | void AssignNext(Pointer<A> assign_at) { |
179 | | *assign_at = *it_; |
180 | | ++it_; |
181 | | } |
182 | | |
183 | | private: |
184 | | Iterator it_; |
185 | | }; |
186 | | |
187 | | template <typename A> |
188 | | class CopyValueAdapter { |
189 | | public: |
190 | | explicit CopyValueAdapter(ConstPointer<A> p) : ptr_(p) {} |
191 | | |
192 | | void ConstructNext(A& allocator, Pointer<A> construct_at) { |
193 | | AllocatorTraits<A>::construct(allocator, construct_at, *ptr_); |
194 | | } |
195 | | |
196 | | void AssignNext(Pointer<A> assign_at) { *assign_at = *ptr_; } |
197 | | |
198 | | private: |
199 | | ConstPointer<A> ptr_; |
200 | | }; |
201 | | |
202 | | template <typename A> |
203 | | class DefaultValueAdapter { |
204 | | public: |
205 | | explicit DefaultValueAdapter() {} |
206 | | |
207 | | void ConstructNext(A& allocator, Pointer<A> construct_at) { |
208 | | AllocatorTraits<A>::construct(allocator, construct_at); |
209 | | } |
210 | | |
211 | | void AssignNext(Pointer<A> assign_at) { *assign_at = ValueType<A>(); } |
212 | | }; |
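The three adapters above expose the same `ConstructNext`/`AssignNext` shape, so the single loops in `ConstructElements` and `AssignElements` cover range copies, fills, and value-initialization alike. A usage sketch of the declarations in this file (assumed to run inside `inlined_vector_internal`, shown only to illustrate the shared protocol):

using A = std::allocator<int>;
A alloc;
int source[3] = {1, 2, 3};
int* dest = std::allocator_traits<A>::allocate(alloc, 3);

IteratorValueAdapter<A, const int*> from_range(source);
ConstructElements<A>(alloc, dest, from_range, 3);    // dest holds {1, 2, 3}

DefaultValueAdapter<A> zeroes;
AssignElements<A>(dest, zeroes, 3);                  // dest holds {0, 0, 0}

DestroyAdapter<A>::DestroyElements(alloc, dest, 3);  // no-op: int is trivial
std::allocator_traits<A>::deallocate(alloc, dest, 3);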
213 | | |
214 | | template <typename A> |
215 | | class AllocationTransaction { |
216 | | public: |
217 | | explicit AllocationTransaction(A& allocator) |
218 | 0 | : allocator_data_(allocator, nullptr), capacity_(0) {} |
219 | | |
220 | 0 | ~AllocationTransaction() { |
221 | 0 | if (DidAllocate()) { |
222 | 0 | MallocAdapter<A>::Deallocate(GetAllocator(), GetData(), GetCapacity()); |
223 | 0 | } |
224 | 0 | } |
225 | | |
226 | | AllocationTransaction(const AllocationTransaction&) = delete; |
227 | | void operator=(const AllocationTransaction&) = delete; |
228 | | |
229 | 0 | A& GetAllocator() { return allocator_data_.template get<0>(); } |
230 | 0 | Pointer<A>& GetData() { return allocator_data_.template get<1>(); } |
231 | 0 | SizeType<A>& GetCapacity() { return capacity_; } |
232 | | |
233 | 0 | bool DidAllocate() { return GetData() != nullptr; } |
234 | | |
235 | 0 | Pointer<A> Allocate(SizeType<A> requested_capacity) { |
236 | 0 | Allocation<A> result = |
237 | 0 | MallocAdapter<A>::Allocate(GetAllocator(), requested_capacity); |
238 | 0 | GetData() = result.data; |
239 | 0 | GetCapacity() = result.capacity; |
240 | 0 | return result.data; |
241 | 0 | } |
242 | | |
243 | 0 | ABSL_MUST_USE_RESULT Allocation<A> Release() && { |
244 | 0 | Allocation<A> result = {GetData(), GetCapacity()}; |
245 | 0 | Reset(); |
246 | 0 | return result; |
247 | 0 | } |
248 | | |
249 | | private: |
250 | 0 | void Reset() { |
251 | 0 | GetData() = nullptr; |
252 | 0 | GetCapacity() = 0; |
253 | 0 | } |
254 | | |
255 | | container_internal::CompressedTuple<A, Pointer<A>> allocator_data_; |
256 | | SizeType<A> capacity_; |
257 | | }; |
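`AllocationTransaction` is the RAII half of the grow-path exception-safety story: the new buffer is owned by the transaction and deallocated in its destructor unless `Release()` is reached after every throwing step has succeeded. A usage sketch, illustrative only:

template <typename A>
Allocation<A> GrowSketch(A& alloc, SizeType<A> new_capacity) {
  AllocationTransaction<A> tx(alloc);
  Pointer<A> new_data = tx.Allocate(new_capacity);
  // ... construct elements into new_data; if this throws, the transaction's
  // destructor deallocates new_data automatically ...
  (void)new_data;
  return std::move(tx).Release();  // success: take ownership of {data, capacity}
}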
258 | | |
259 | | template <typename A> |
260 | | class ConstructionTransaction { |
261 | | public: |
262 | | explicit ConstructionTransaction(A& allocator) |
263 | | : allocator_data_(allocator, nullptr), size_(0) {} |
264 | | |
265 | | ~ConstructionTransaction() { |
266 | | if (DidConstruct()) { |
267 | | DestroyAdapter<A>::DestroyElements(GetAllocator(), GetData(), GetSize()); |
268 | | } |
269 | | } |
270 | | |
271 | | ConstructionTransaction(const ConstructionTransaction&) = delete; |
272 | | void operator=(const ConstructionTransaction&) = delete; |
273 | | |
274 | | A& GetAllocator() { return allocator_data_.template get<0>(); } |
275 | | Pointer<A>& GetData() { return allocator_data_.template get<1>(); } |
276 | | SizeType<A>& GetSize() { return size_; } |
277 | | |
278 | | bool DidConstruct() { return GetData() != nullptr; } |
279 | | template <typename ValueAdapter> |
280 | | void Construct(Pointer<A> data, ValueAdapter& values, SizeType<A> size) { |
281 | | ConstructElements<A>(GetAllocator(), data, values, size); |
282 | | GetData() = data; |
283 | | GetSize() = size; |
284 | | } |
285 | | void Commit() && { |
286 | | GetData() = nullptr; |
287 | | GetSize() = 0; |
288 | | } |
289 | | |
290 | | private: |
291 | | container_internal::CompressedTuple<A, Pointer<A>> allocator_data_; |
292 | | SizeType<A> size_; |
293 | | }; |
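`ConstructionTransaction` plays the same role for element lifetimes: elements built through `Construct` are destroyed by the destructor unless `Commit()` is reached, which lets `Resize` and `Insert` compose several throwing steps and still roll back cleanly. A sketch, illustrative only:

template <typename A, typename ValueAdapter>
void BuildThenMaybeThrow(A& alloc, Pointer<A> buffer, ValueAdapter& values,
                         SizeType<A> n) {
  ConstructionTransaction<A> tx(alloc);
  tx.Construct(buffer, values, n);  // rolled back if anything below throws
  // ... other work that may throw ...
  std::move(tx).Commit();           // keep the n constructed elements
}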
294 | | |
295 | | template <typename T, size_t N, typename A> |
296 | | class Storage { |
297 | | public: |
298 | | struct MemcpyPolicy {}; |
299 | | struct ElementwiseAssignPolicy {}; |
300 | | struct ElementwiseSwapPolicy {}; |
301 | | struct ElementwiseConstructPolicy {}; |
302 | | |
303 | | using MoveAssignmentPolicy = absl::conditional_t< |
304 | | // Fast path: if the value type can be trivially move assigned and |
305 | | // destroyed, and we know the allocator doesn't do anything fancy, then |
306 | | // it's safe for us to simply adopt the contents of the storage for |
307 | | // `other` and remove its own reference to them. It's as if we had |
308 | | // individually move-assigned each value and then destroyed the original. |
309 | | absl::conjunction<absl::is_trivially_move_assignable<ValueType<A>>, |
310 | | absl::is_trivially_destructible<ValueType<A>>, |
311 | | std::is_same<A, std::allocator<ValueType<A>>>>::value, |
312 | | MemcpyPolicy, |
313 | | // Otherwise we use move assignment if possible. If not, we simulate |
314 | | // move assignment using move construction. |
315 | | // |
316 | | // Note that this is in contrast to e.g. std::vector and std::optional, |
317 | | // which are themselves not move-assignable when their contained type is |
318 | | // not. |
319 | | absl::conditional_t<IsMoveAssignOk<A>::value, ElementwiseAssignPolicy, |
320 | | ElementwiseConstructPolicy>>; |
321 | | |
322 | | // The policy to be used specifically when swapping inlined elements. |
323 | | using SwapInlinedElementsPolicy = absl::conditional_t< |
324 | | // Fast path: if the value type can be trivially move constructed/assigned |
325 | | // and destroyed, and we know the allocator doesn't do anything fancy, |
326 | | // then it's safe for us to simply swap the bytes in the inline storage. |
327 | | // It's as if we had move-constructed a temporary vector, move-assigned |
328 | | // one to the other, then move-assigned the first from the temporary. |
329 | | absl::conjunction<absl::is_trivially_move_constructible<ValueType<A>>, |
330 | | absl::is_trivially_move_assignable<ValueType<A>>, |
331 | | absl::is_trivially_destructible<ValueType<A>>, |
332 | | std::is_same<A, std::allocator<ValueType<A>>>>::value, |
333 | | MemcpyPolicy, |
334 | | absl::conditional_t<IsSwapOk<A>::value, ElementwiseSwapPolicy, |
335 | | ElementwiseConstructPolicy>>; |
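Both policy aliases resolve at compile time to one of the empty tag types declared above; the member functions further down (`SwapInlinedElements`, `SwapN`) then overload on the tag. A standalone sketch of this trait-driven tag dispatch, with illustrative names:

#include <algorithm>
#include <cstddef>
#include <cstring>
#include <type_traits>

struct MemcpyTag {};
struct ElementwiseTag {};

template <typename T>
using SwapTag = std::conditional_t<std::is_trivially_copyable<T>::value,
                                   MemcpyTag, ElementwiseTag>;

template <typename T>
void SwapBuffers(MemcpyTag, T* a, T* b, std::size_t n) {
  for (std::size_t i = 0; i < n; ++i) {  // byte-wise swap is legal here
    unsigned char tmp[sizeof(T)];
    std::memcpy(tmp, a + i, sizeof(T));
    std::memcpy(a + i, b + i, sizeof(T));
    std::memcpy(b + i, tmp, sizeof(T));
  }
}

template <typename T>
void SwapBuffers(ElementwiseTag, T* a, T* b, std::size_t n) {
  std::swap_ranges(a, a + n, b);  // element-by-element swap
}

template <typename T>
void SwapBuffers(T* a, T* b, std::size_t n) {
  SwapBuffers(SwapTag<T>{}, a, b, n);  // tag picked from the traits
}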
336 | | |
337 | 0 | static SizeType<A> NextCapacity(SizeType<A> current_capacity) { |
338 | 0 | return current_capacity * 2; |
339 | 0 | }
340 | | |
341 | | static SizeType<A> ComputeCapacity(SizeType<A> current_capacity, |
342 | 0 | SizeType<A> requested_capacity) { |
343 | 0 | return (std::max)(NextCapacity(current_capacity), requested_capacity); |
344 | 0 | } |
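The growth policy doubles the current capacity but never returns less than the caller asked for, so a single large insert or reserve jumps straight to the requested size. A worked standalone example:

#include <algorithm>
#include <cstddef>
#include <iostream>

std::size_t NextCapacity(std::size_t c) { return c * 2; }
std::size_t ComputeCapacity(std::size_t c, std::size_t requested) {
  return std::max(NextCapacity(c), requested);
}

int main() {
  std::cout << ComputeCapacity(4, 5) << "\n";    // 8: doubling dominates
  std::cout << ComputeCapacity(4, 100) << "\n";  // 100: the request dominates
}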
345 | | |
346 | | // --------------------------------------------------------------------------- |
347 | | // Storage Constructors and Destructor |
348 | | // --------------------------------------------------------------------------- |
349 | | |
350 | 3.24M | Storage() : metadata_(A(), /* size and is_allocated */ 0u) {} |
351 | | |
352 | | explicit Storage(const A& allocator) |
353 | 0 | : metadata_(allocator, /* size and is_allocated */ 0u) {} |
354 | | |
355 | 3.24M | ~Storage() { |
356 | | // Fast path: if we are empty and not allocated, there's nothing to do. |
357 | 3.24M | if (GetSizeAndIsAllocated() == 0) { |
358 | 3.24M | return; |
359 | 3.24M | } |
360 | | |
361 | | // Fast path: if no destructors need to be run and we know the allocator |
362 | | // doesn't do anything fancy, then all we need to do is deallocate (and |
363 | | // maybe not even that). |
364 | 0 | if (absl::is_trivially_destructible<ValueType<A>>::value && |
365 | 0 | std::is_same<A, std::allocator<ValueType<A>>>::value) { |
366 | 0 | DeallocateIfAllocated(); |
367 | 0 | return; |
368 | 0 | } |
369 | | |
370 | 0 | DestroyContents(); |
371 | 0 | }
372 | | |
373 | | // --------------------------------------------------------------------------- |
374 | | // Storage Member Accessors |
375 | | // --------------------------------------------------------------------------- |
376 | | |
377 | 3.24M | SizeType<A>& GetSizeAndIsAllocated() { return metadata_.template get<1>(); }
378 | | |
379 | 6.48M | const SizeType<A>& GetSizeAndIsAllocated() const { |
380 | 6.48M | return metadata_.template get<1>(); |
381 | 6.48M | }
382 | | |
383 | 3.24M | SizeType<A> GetSize() const { return GetSizeAndIsAllocated() >> 1; }
384 | | |
385 | 3.24M | bool GetIsAllocated() const { return GetSizeAndIsAllocated() & 1; }
386 | | |
387 | 0 | Pointer<A> GetAllocatedData() { return data_.allocated.allocated_data; }
388 | | |
389 | 0 | ConstPointer<A> GetAllocatedData() const { |
390 | 0 | return data_.allocated.allocated_data; |
391 | 0 | } |
392 | | |
393 | | // ABSL_ATTRIBUTE_NO_SANITIZE_CFI is used because the memory pointed to may be |
394 | | // uninitialized, a common pattern in allocate()+construct() APIs. |
395 | | // https://clang.llvm.org/docs/ControlFlowIntegrity.html#bad-cast-checking |
396 | | // NOTE: When this was written, LLVM documentation did not explicitly |
397 | | // mention that casting `char*` and using `reinterpret_cast` qualifies |
398 | | // as a bad cast. |
399 | 3.24M | ABSL_ATTRIBUTE_NO_SANITIZE_CFI Pointer<A> GetInlinedData() { |
400 | 3.24M | return reinterpret_cast<Pointer<A>>(data_.inlined.inlined_data); |
401 | 3.24M | }
402 | | |
403 | 0 | ABSL_ATTRIBUTE_NO_SANITIZE_CFI ConstPointer<A> GetInlinedData() const { |
404 | 0 | return reinterpret_cast<ConstPointer<A>>(data_.inlined.inlined_data); |
405 | 0 | } |
406 | | |
407 | 0 | SizeType<A> GetAllocatedCapacity() const { |
408 | 0 | return data_.allocated.allocated_capacity; |
409 | 0 | }
410 | | |
411 | 0 | SizeType<A> GetInlinedCapacity() const { |
412 | 0 | return static_cast<SizeType<A>>(kOptimalInlinedSize); |
413 | 0 | }
414 | | |
415 | 0 | StorageView<A> MakeStorageView() { |
416 | 0 | return GetIsAllocated() ? StorageView<A>{GetAllocatedData(), GetSize(), |
417 | 0 | GetAllocatedCapacity()} |
418 | 0 | : StorageView<A>{GetInlinedData(), GetSize(), |
419 | 0 | GetInlinedCapacity()}; |
420 | 0 | } |
421 | | |
422 | 0 | A& GetAllocator() { return metadata_.template get<0>(); }
423 | | |
424 | | const A& GetAllocator() const { return metadata_.template get<0>(); } |
425 | | |
426 | | // --------------------------------------------------------------------------- |
427 | | // Storage Member Mutators |
428 | | // --------------------------------------------------------------------------- |
429 | | |
430 | | ABSL_ATTRIBUTE_NOINLINE void InitFrom(const Storage& other); |
431 | | |
432 | | template <typename ValueAdapter> |
433 | | void Initialize(ValueAdapter values, SizeType<A> new_size); |
434 | | |
435 | | template <typename ValueAdapter> |
436 | | void Assign(ValueAdapter values, SizeType<A> new_size); |
437 | | |
438 | | template <typename ValueAdapter> |
439 | | void Resize(ValueAdapter values, SizeType<A> new_size); |
440 | | |
441 | | template <typename ValueAdapter> |
442 | | Iterator<A> Insert(ConstIterator<A> pos, ValueAdapter values, |
443 | | SizeType<A> insert_count); |
444 | | |
445 | | template <typename... Args> |
446 | | Reference<A> EmplaceBack(Args&&... args); |
447 | | |
448 | | Iterator<A> Erase(ConstIterator<A> from, ConstIterator<A> to); |
449 | | |
450 | | void Reserve(SizeType<A> requested_capacity); |
451 | | |
452 | | void ShrinkToFit(); |
453 | | |
454 | | void Swap(Storage* other_storage_ptr); |
455 | | |
456 | 0 | void SetIsAllocated() { |
457 | 0 | GetSizeAndIsAllocated() |= static_cast<SizeType<A>>(1); |
458 | 0 | }
459 | | |
460 | | void UnsetIsAllocated() { |
461 | | GetSizeAndIsAllocated() &= ((std::numeric_limits<SizeType<A>>::max)() - 1); |
462 | | } |
463 | | |
464 | | void SetSize(SizeType<A> size) { |
465 | | GetSizeAndIsAllocated() = |
466 | | (size << 1) | static_cast<SizeType<A>>(GetIsAllocated()); |
467 | | } |
468 | | |
469 | 0 | void SetAllocatedSize(SizeType<A> size) { |
470 | 0 | GetSizeAndIsAllocated() = (size << 1) | static_cast<SizeType<A>>(1); |
471 | 0 | } |
472 | | |
473 | 0 | void SetInlinedSize(SizeType<A> size) { |
474 | 0 | GetSizeAndIsAllocated() = size << static_cast<SizeType<A>>(1); |
475 | 0 | }
476 | | |
477 | 0 | void AddSize(SizeType<A> count) { |
478 | 0 | GetSizeAndIsAllocated() += count << static_cast<SizeType<A>>(1); |
479 | 0 | }
480 | | |
481 | | void SubtractSize(SizeType<A> count) { |
482 | | ABSL_HARDENING_ASSERT(count <= GetSize()); |
483 | | |
484 | | GetSizeAndIsAllocated() -= count << static_cast<SizeType<A>>(1); |
485 | | } |
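All of the accessors and mutators above manipulate one packed word: bit 0 records whether the storage is heap-allocated and the remaining bits hold the size, which is why reads shift right by one and writes shift left. A worked standalone example of the same packing:

#include <cstddef>
#include <iostream>

int main() {
  std::size_t size_and_is_allocated = 0;

  size_and_is_allocated = (std::size_t{7} << 1) | 1;  // SetAllocatedSize(7)
  std::cout << (size_and_is_allocated >> 1) << "\n";  // size: 7
  std::cout << (size_and_is_allocated & 1) << "\n";   // is_allocated: 1

  size_and_is_allocated += std::size_t{2} << 1;       // AddSize(2)
  std::cout << (size_and_is_allocated >> 1) << "\n";  // size: 9, flag bit kept
}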
486 | | |
487 | 0 | void SetAllocation(Allocation<A> allocation) { |
488 | 0 | data_.allocated.allocated_data = allocation.data; |
489 | 0 | data_.allocated.allocated_capacity = allocation.capacity; |
490 | 0 | }
491 | | |
492 | 0 | void MemcpyFrom(const Storage& other_storage) { |
493 | 0 | // Assumption check: it doesn't make sense to memcpy inlined elements unless |
494 | 0 | // we know the allocator doesn't do anything fancy, and one of the following |
495 | 0 | // holds: |
496 | 0 | // |
497 | 0 | // * The elements are trivially relocatable. |
498 | 0 | // |
499 | 0 | // * It's possible to trivially assign the elements and then destroy the |
500 | 0 | // source. |
501 | 0 | // |
502 | 0 | // * It's possible to trivially copy construct/assign the elements. |
503 | 0 | // |
504 | 0 | { |
505 | 0 | using V = ValueType<A>; |
506 | 0 | ABSL_HARDENING_ASSERT( |
507 | 0 | other_storage.GetIsAllocated() || |
508 | 0 | (std::is_same<A, std::allocator<V>>::value && |
509 | 0 | ( |
510 | 0 | // First case above |
511 | 0 | absl::is_trivially_relocatable<V>::value || |
512 | 0 | // Second case above |
513 | 0 | (absl::is_trivially_move_assignable<V>::value && |
514 | 0 | absl::is_trivially_destructible<V>::value) || |
515 | 0 | // Third case above |
516 | 0 | (absl::is_trivially_copy_constructible<V>::value || |
517 | 0 | absl::is_trivially_copy_assignable<V>::value)))); |
518 | 0 | } |
519 | 0 |
520 | 0 | GetSizeAndIsAllocated() = other_storage.GetSizeAndIsAllocated(); |
521 | 0 | data_ = other_storage.data_; |
522 | 0 | } |
523 | | |
524 | 0 | void DeallocateIfAllocated() { |
525 | 0 | if (GetIsAllocated()) { |
526 | 0 | MallocAdapter<A>::Deallocate(GetAllocator(), GetAllocatedData(), |
527 | 0 | GetAllocatedCapacity()); |
528 | 0 | } |
529 | 0 | }
530 | | |
531 | | private: |
532 | | ABSL_ATTRIBUTE_NOINLINE void DestroyContents(); |
533 | | |
534 | | using Metadata = container_internal::CompressedTuple<A, SizeType<A>>; |
535 | | |
536 | | struct Allocated { |
537 | | Pointer<A> allocated_data; |
538 | | SizeType<A> allocated_capacity; |
539 | | }; |
540 | | |
541 | | // `kOptimalInlinedSize` is an automatically adjusted inlined capacity of the |
542 | | // `InlinedVector`. Sometimes, it is possible to increase the capacity (from |
543 | | // the user requested `N`) without increasing the size of the `InlinedVector`. |
544 | | static constexpr size_t kOptimalInlinedSize = |
545 | | (std::max)(N, sizeof(Allocated) / sizeof(ValueType<A>)); |
546 | | |
547 | | struct Inlined { |
548 | | alignas(ValueType<A>) char inlined_data[sizeof( |
549 | | ValueType<A>[kOptimalInlinedSize])]; |
550 | | }; |
551 | | |
552 | | union Data { |
553 | | Allocated allocated; |
554 | | Inlined inlined; |
555 | | }; |
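Because `Data` is a union of `Inlined` and `Allocated`, the inline buffer can grow to fill `sizeof(Allocated)` at no cost in object size; `kOptimalInlinedSize` captures exactly that. A worked standalone sketch, assuming a struct layout that mirrors `Allocated` on a typical 64-bit target:

#include <algorithm>
#include <cstddef>
#include <iostream>

struct AllocatedSketch {  // pointer + capacity, like `Allocated` above
  void* data;
  std::size_t capacity;
};

template <typename T, std::size_t N>
constexpr std::size_t OptimalInlinedSize() {
  return std::max(N, sizeof(AllocatedSketch) / sizeof(T));
}

int main() {
  // With 8-byte pointers, sizeof(AllocatedSketch) == 16, so requesting 4
  // inlined chars still yields 16 inline slots "for free".
  std::cout << OptimalInlinedSize<char, 4>() << "\n";    // 16
  std::cout << OptimalInlinedSize<double, 4>() << "\n";  // 4 (16/8 == 2 < 4)
}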
556 | | |
557 | | void SwapN(ElementwiseSwapPolicy, Storage* other, SizeType<A> n); |
558 | | void SwapN(ElementwiseConstructPolicy, Storage* other, SizeType<A> n); |
559 | | |
560 | | void SwapInlinedElements(MemcpyPolicy, Storage* other); |
561 | | template <typename NotMemcpyPolicy> |
562 | | void SwapInlinedElements(NotMemcpyPolicy, Storage* other); |
563 | | |
564 | | template <typename... Args> |
565 | | ABSL_ATTRIBUTE_NOINLINE Reference<A> EmplaceBackSlow(Args&&... args); |
566 | | |
567 | | Metadata metadata_; |
568 | | Data data_; |
569 | | }; |
570 | | |
571 | | template <typename T, size_t N, typename A> |
572 | 0 | void Storage<T, N, A>::DestroyContents() { |
573 | 0 | Pointer<A> data = GetIsAllocated() ? GetAllocatedData() : GetInlinedData(); |
574 | 0 | DestroyAdapter<A>::DestroyElements(GetAllocator(), data, GetSize()); |
575 | 0 | DeallocateIfAllocated(); |
576 | 0 | }
577 | | |
578 | | template <typename T, size_t N, typename A> |
579 | | void Storage<T, N, A>::InitFrom(const Storage& other) { |
580 | | const SizeType<A> n = other.GetSize(); |
581 | | ABSL_HARDENING_ASSERT(n > 0); // Empty sources handled in caller. |
582 | | ConstPointer<A> src; |
583 | | Pointer<A> dst; |
584 | | if (!other.GetIsAllocated()) { |
585 | | dst = GetInlinedData(); |
586 | | src = other.GetInlinedData(); |
587 | | } else { |
588 | | // Because this is only called from the `InlinedVector` constructors, it's |
589 | | // safe to take on the allocation with size `0`. If `ConstructElements(...)` |
590 | | // throws, deallocation will be automatically handled by `~Storage()`. |
591 | | SizeType<A> requested_capacity = ComputeCapacity(GetInlinedCapacity(), n); |
592 | | Allocation<A> allocation = |
593 | | MallocAdapter<A>::Allocate(GetAllocator(), requested_capacity); |
594 | | SetAllocation(allocation); |
595 | | dst = allocation.data; |
596 | | src = other.GetAllocatedData(); |
597 | | } |
598 | | |
599 | | // Fast path: if the value type is trivially copy constructible and we know |
600 | | // the allocator doesn't do anything fancy, then we know it is legal for us to |
601 | | // simply memcpy the other vector's elements. |
602 | | if (absl::is_trivially_copy_constructible<ValueType<A>>::value && |
603 | | std::is_same<A, std::allocator<ValueType<A>>>::value) { |
604 | | std::memcpy(reinterpret_cast<char*>(dst), |
605 | | reinterpret_cast<const char*>(src), n * sizeof(ValueType<A>)); |
606 | | } else { |
607 | | auto values = IteratorValueAdapter<A, ConstPointer<A>>(src); |
608 | | ConstructElements<A>(GetAllocator(), dst, values, n); |
609 | | } |
610 | | |
611 | | GetSizeAndIsAllocated() = other.GetSizeAndIsAllocated(); |
612 | | } |
613 | | |
614 | | template <typename T, size_t N, typename A> |
615 | | template <typename ValueAdapter> |
616 | | auto Storage<T, N, A>::Initialize(ValueAdapter values, SizeType<A> new_size) |
617 | 0 | -> void { |
618 | | // Only callable from constructors! |
619 | 0 | ABSL_HARDENING_ASSERT(!GetIsAllocated()); |
620 | 0 | ABSL_HARDENING_ASSERT(GetSize() == 0); |
621 | |
622 | 0 | Pointer<A> construct_data; |
623 | 0 | if (new_size > GetInlinedCapacity()) { |
624 | | // Because this is only called from the `InlinedVector` constructors, it's |
625 | | // safe to take on the allocation with size `0`. If `ConstructElements(...)` |
626 | | // throws, deallocation will be automatically handled by `~Storage()`. |
627 | 0 | SizeType<A> requested_capacity = |
628 | 0 | ComputeCapacity(GetInlinedCapacity(), new_size); |
629 | 0 | Allocation<A> allocation = |
630 | 0 | MallocAdapter<A>::Allocate(GetAllocator(), requested_capacity); |
631 | 0 | construct_data = allocation.data; |
632 | 0 | SetAllocation(allocation); |
633 | 0 | SetIsAllocated(); |
634 | 0 | } else { |
635 | 0 | construct_data = GetInlinedData(); |
636 | 0 | } |
637 | |
638 | 0 | ConstructElements<A>(GetAllocator(), construct_data, values, new_size); |
639 | | |
640 | | // Since the initial size was guaranteed to be `0` and the allocated bit is |
641 | | // already correct for either case, *adding* `new_size` gives us the correct |
642 | | // result faster than setting it directly. |
643 | 0 | AddSize(new_size); |
644 | 0 | } |
645 | | |
646 | | template <typename T, size_t N, typename A> |
647 | | template <typename ValueAdapter> |
648 | | auto Storage<T, N, A>::Assign(ValueAdapter values, SizeType<A> new_size) |
649 | | -> void { |
650 | | StorageView<A> storage_view = MakeStorageView(); |
651 | | |
652 | | AllocationTransaction<A> allocation_tx(GetAllocator()); |
653 | | |
654 | | absl::Span<ValueType<A>> assign_loop; |
655 | | absl::Span<ValueType<A>> construct_loop; |
656 | | absl::Span<ValueType<A>> destroy_loop; |
657 | | |
658 | | if (new_size > storage_view.capacity) { |
659 | | SizeType<A> requested_capacity = |
660 | | ComputeCapacity(storage_view.capacity, new_size); |
661 | | construct_loop = {allocation_tx.Allocate(requested_capacity), new_size}; |
662 | | destroy_loop = {storage_view.data, storage_view.size}; |
663 | | } else if (new_size > storage_view.size) { |
664 | | assign_loop = {storage_view.data, storage_view.size}; |
665 | | construct_loop = {storage_view.data + storage_view.size, |
666 | | new_size - storage_view.size}; |
667 | | } else { |
668 | | assign_loop = {storage_view.data, new_size}; |
669 | | destroy_loop = {storage_view.data + new_size, storage_view.size - new_size}; |
670 | | } |
671 | | |
672 | | AssignElements<A>(assign_loop.data(), values, assign_loop.size()); |
673 | | |
674 | | ConstructElements<A>(GetAllocator(), construct_loop.data(), values, |
675 | | construct_loop.size()); |
676 | | |
677 | | DestroyAdapter<A>::DestroyElements(GetAllocator(), destroy_loop.data(), |
678 | | destroy_loop.size()); |
679 | | |
680 | | if (allocation_tx.DidAllocate()) { |
681 | | DeallocateIfAllocated(); |
682 | | SetAllocation(std::move(allocation_tx).Release()); |
683 | | SetIsAllocated(); |
684 | | } |
685 | | |
686 | | SetSize(new_size); |
687 | | } |
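`Assign` splits the work into three disjoint spans: elements to overwrite in place, elements to construct beyond the old size, and elements to destroy because they are no longer needed; each of the reallocate/grow/shrink branches leaves a different combination non-empty. A standalone sketch that computes only the span sizes:

#include <cstddef>
#include <iostream>

struct Split { std::size_t assign, construct, destroy; };

// Mirrors the three-way branch in Assign(), sizes only, no element work.
Split AssignSplit(std::size_t size, std::size_t capacity, std::size_t new_size) {
  if (new_size > capacity) return {0, new_size, size};     // fresh allocation
  if (new_size > size) return {size, new_size - size, 0};  // grow in place
  return {new_size, 0, size - new_size};                   // shrink in place
}

int main() {
  Split s = AssignSplit(/*size=*/5, /*capacity=*/8, /*new_size=*/12);
  std::cout << s.assign << " " << s.construct << " " << s.destroy << "\n";  // 0 12 5
}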
688 | | |
689 | | template <typename T, size_t N, typename A> |
690 | | template <typename ValueAdapter> |
691 | | auto Storage<T, N, A>::Resize(ValueAdapter values, SizeType<A> new_size) |
692 | | -> void { |
693 | | StorageView<A> storage_view = MakeStorageView(); |
694 | | Pointer<A> const base = storage_view.data; |
695 | | const SizeType<A> size = storage_view.size; |
696 | | A& alloc = GetAllocator(); |
697 | | if (new_size <= size) { |
698 | | // Destroy extra old elements. |
699 | | DestroyAdapter<A>::DestroyElements(alloc, base + new_size, size - new_size); |
700 | | } else if (new_size <= storage_view.capacity) { |
701 | | // Construct new elements in place. |
702 | | ConstructElements<A>(alloc, base + size, values, new_size - size); |
703 | | } else { |
704 | | // Steps: |
705 | | // a. Allocate new backing store. |
706 | | // b. Construct new elements in new backing store. |
707 | | // c. Move existing elements from old backing store to new backing store. |
708 | | // d. Destroy all elements in old backing store. |
709 | | // Use transactional wrappers for the first two steps so we can roll |
710 | | // back if necessary due to exceptions. |
711 | | AllocationTransaction<A> allocation_tx(alloc); |
712 | | SizeType<A> requested_capacity = |
713 | | ComputeCapacity(storage_view.capacity, new_size); |
714 | | Pointer<A> new_data = allocation_tx.Allocate(requested_capacity); |
715 | | |
716 | | ConstructionTransaction<A> construction_tx(alloc); |
717 | | construction_tx.Construct(new_data + size, values, new_size - size); |
718 | | |
719 | | IteratorValueAdapter<A, MoveIterator<A>> move_values( |
720 | | (MoveIterator<A>(base))); |
721 | | ConstructElements<A>(alloc, new_data, move_values, size); |
722 | | |
723 | | DestroyAdapter<A>::DestroyElements(alloc, base, size); |
724 | | std::move(construction_tx).Commit(); |
725 | | DeallocateIfAllocated(); |
726 | | SetAllocation(std::move(allocation_tx).Release()); |
727 | | SetIsAllocated(); |
728 | | } |
729 | | SetSize(new_size); |
730 | | } |
731 | | |
732 | | template <typename T, size_t N, typename A> |
733 | | template <typename ValueAdapter> |
734 | | auto Storage<T, N, A>::Insert(ConstIterator<A> pos, ValueAdapter values, |
735 | | SizeType<A> insert_count) -> Iterator<A> { |
736 | | StorageView<A> storage_view = MakeStorageView(); |
737 | | |
738 | | auto insert_index = static_cast<SizeType<A>>( |
739 | | std::distance(ConstIterator<A>(storage_view.data), pos)); |
740 | | SizeType<A> insert_end_index = insert_index + insert_count; |
741 | | SizeType<A> new_size = storage_view.size + insert_count; |
742 | | |
743 | | if (new_size > storage_view.capacity) { |
744 | | AllocationTransaction<A> allocation_tx(GetAllocator()); |
745 | | ConstructionTransaction<A> construction_tx(GetAllocator()); |
746 | | ConstructionTransaction<A> move_construction_tx(GetAllocator()); |
747 | | |
748 | | IteratorValueAdapter<A, MoveIterator<A>> move_values( |
749 | | MoveIterator<A>(storage_view.data)); |
750 | | |
751 | | SizeType<A> requested_capacity = |
752 | | ComputeCapacity(storage_view.capacity, new_size); |
753 | | Pointer<A> new_data = allocation_tx.Allocate(requested_capacity); |
754 | | |
755 | | construction_tx.Construct(new_data + insert_index, values, insert_count); |
756 | | |
757 | | move_construction_tx.Construct(new_data, move_values, insert_index); |
758 | | |
759 | | ConstructElements<A>(GetAllocator(), new_data + insert_end_index, |
760 | | move_values, storage_view.size - insert_index); |
761 | | |
762 | | DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data, |
763 | | storage_view.size); |
764 | | |
765 | | std::move(construction_tx).Commit(); |
766 | | std::move(move_construction_tx).Commit(); |
767 | | DeallocateIfAllocated(); |
768 | | SetAllocation(std::move(allocation_tx).Release()); |
769 | | |
770 | | SetAllocatedSize(new_size); |
771 | | return Iterator<A>(new_data + insert_index); |
772 | | } else { |
773 | | SizeType<A> move_construction_destination_index = |
774 | | (std::max)(insert_end_index, storage_view.size); |
775 | | |
776 | | ConstructionTransaction<A> move_construction_tx(GetAllocator()); |
777 | | |
778 | | IteratorValueAdapter<A, MoveIterator<A>> move_construction_values( |
779 | | MoveIterator<A>(storage_view.data + |
780 | | (move_construction_destination_index - insert_count))); |
781 | | absl::Span<ValueType<A>> move_construction = { |
782 | | storage_view.data + move_construction_destination_index, |
783 | | new_size - move_construction_destination_index}; |
784 | | |
785 | | Pointer<A> move_assignment_values = storage_view.data + insert_index; |
786 | | absl::Span<ValueType<A>> move_assignment = { |
787 | | storage_view.data + insert_end_index, |
788 | | move_construction_destination_index - insert_end_index}; |
789 | | |
790 | | absl::Span<ValueType<A>> insert_assignment = {move_assignment_values, |
791 | | move_construction.size()}; |
792 | | |
793 | | absl::Span<ValueType<A>> insert_construction = { |
794 | | insert_assignment.data() + insert_assignment.size(), |
795 | | insert_count - insert_assignment.size()}; |
796 | | |
797 | | move_construction_tx.Construct(move_construction.data(), |
798 | | move_construction_values, |
799 | | move_construction.size()); |
800 | | |
801 | | for (Pointer<A> |
802 | | destination = move_assignment.data() + move_assignment.size(), |
803 | | last_destination = move_assignment.data(), |
804 | | source = move_assignment_values + move_assignment.size(); |
805 | | ;) { |
806 | | --destination; |
807 | | --source; |
808 | | if (destination < last_destination) break; |
809 | | *destination = std::move(*source); |
810 | | } |
811 | | |
812 | | AssignElements<A>(insert_assignment.data(), values, |
813 | | insert_assignment.size()); |
814 | | |
815 | | ConstructElements<A>(GetAllocator(), insert_construction.data(), values, |
816 | | insert_construction.size()); |
817 | | |
818 | | std::move(move_construction_tx).Commit(); |
819 | | |
820 | | AddSize(insert_count); |
821 | | return Iterator<A>(storage_view.data + insert_index); |
822 | | } |
823 | | } |
824 | | |
825 | | template <typename T, size_t N, typename A> |
826 | | template <typename... Args> |
827 | 0 | auto Storage<T, N, A>::EmplaceBack(Args&&... args) -> Reference<A> { |
828 | 0 | StorageView<A> storage_view = MakeStorageView(); |
829 | 0 | const SizeType<A> n = storage_view.size; |
830 | 0 | if (ABSL_PREDICT_TRUE(n != storage_view.capacity)) { |
831 | | // Fast path; new element fits. |
832 | 0 | Pointer<A> last_ptr = storage_view.data + n; |
833 | 0 | AllocatorTraits<A>::construct(GetAllocator(), last_ptr, |
834 | 0 | std::forward<Args>(args)...); |
835 | 0 | AddSize(1); |
836 | 0 | return *last_ptr; |
837 | 0 | } |
838 | | // TODO(b/173712035): Annotate with musttail attribute to prevent regression. |
839 | 0 | return EmplaceBackSlow(std::forward<Args>(args)...); |
840 | 0 | } |
841 | | |
842 | | template <typename T, size_t N, typename A> |
843 | | template <typename... Args> |
844 | 0 | auto Storage<T, N, A>::EmplaceBackSlow(Args&&... args) -> Reference<A> { |
845 | 0 | StorageView<A> storage_view = MakeStorageView(); |
846 | 0 | AllocationTransaction<A> allocation_tx(GetAllocator()); |
847 | 0 | IteratorValueAdapter<A, MoveIterator<A>> move_values( |
848 | 0 | MoveIterator<A>(storage_view.data)); |
849 | 0 | SizeType<A> requested_capacity = NextCapacity(storage_view.capacity); |
850 | 0 | Pointer<A> construct_data = allocation_tx.Allocate(requested_capacity); |
851 | 0 | Pointer<A> last_ptr = construct_data + storage_view.size; |
852 | | |
853 | | // Construct new element. |
854 | 0 | AllocatorTraits<A>::construct(GetAllocator(), last_ptr, |
855 | 0 | std::forward<Args>(args)...); |
856 | | // Move elements from old backing store to new backing store. |
857 | 0 | ABSL_INTERNAL_TRY { |
858 | 0 | ConstructElements<A>(GetAllocator(), allocation_tx.GetData(), move_values, |
859 | 0 | storage_view.size); |
860 | 0 | } |
861 | 0 | ABSL_INTERNAL_CATCH_ANY { |
862 | 0 | AllocatorTraits<A>::destroy(GetAllocator(), last_ptr); |
863 | 0 | ABSL_INTERNAL_RETHROW; |
864 | 0 | } |
865 | | // Destroy elements in old backing store. |
866 | 0 | DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data, |
867 | 0 | storage_view.size); |
868 | |
869 | 0 | DeallocateIfAllocated(); |
870 | 0 | SetAllocation(std::move(allocation_tx).Release()); |
871 | 0 | SetIsAllocated(); |
872 | 0 | AddSize(1); |
873 | 0 | return *last_ptr; |
874 | 0 | } |
875 | | |
876 | | template <typename T, size_t N, typename A> |
877 | | auto Storage<T, N, A>::Erase(ConstIterator<A> from, ConstIterator<A> to) |
878 | | -> Iterator<A> { |
879 | | StorageView<A> storage_view = MakeStorageView(); |
880 | | |
881 | | auto erase_size = static_cast<SizeType<A>>(std::distance(from, to)); |
882 | | auto erase_index = static_cast<SizeType<A>>( |
883 | | std::distance(ConstIterator<A>(storage_view.data), from)); |
884 | | SizeType<A> erase_end_index = erase_index + erase_size; |
885 | | |
886 | | IteratorValueAdapter<A, MoveIterator<A>> move_values( |
887 | | MoveIterator<A>(storage_view.data + erase_end_index)); |
888 | | |
889 | | AssignElements<A>(storage_view.data + erase_index, move_values, |
890 | | storage_view.size - erase_end_index); |
891 | | |
892 | | DestroyAdapter<A>::DestroyElements( |
893 | | GetAllocator(), storage_view.data + (storage_view.size - erase_size), |
894 | | erase_size); |
895 | | |
896 | | SubtractSize(erase_size); |
897 | | return Iterator<A>(storage_view.data + erase_index); |
898 | | } |
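`Erase` never shifts raw memory: it move-assigns the tail `[to, end)` onto the erased slots and then destroys the last `erase_size` elements, so every surviving element stays at a valid, constructed address. The same technique demonstrated with a plain `std::vector`, purely as an illustration:

#include <algorithm>
#include <iostream>
#include <vector>

int main() {
  std::vector<int> v = {0, 1, 2, 3, 4, 5};
  // Erase indices [1, 3): move {3, 4, 5} onto index 1, then drop 2 dead slots.
  std::move(v.begin() + 3, v.end(), v.begin() + 1);
  v.resize(v.size() - 2);
  for (int x : v) std::cout << x << ' ';  // prints: 0 3 4 5
}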
899 | | |
900 | | template <typename T, size_t N, typename A> |
901 | | auto Storage<T, N, A>::Reserve(SizeType<A> requested_capacity) -> void { |
902 | | StorageView<A> storage_view = MakeStorageView(); |
903 | | |
904 | | if (ABSL_PREDICT_FALSE(requested_capacity <= storage_view.capacity)) return; |
905 | | |
906 | | AllocationTransaction<A> allocation_tx(GetAllocator()); |
907 | | |
908 | | IteratorValueAdapter<A, MoveIterator<A>> move_values( |
909 | | MoveIterator<A>(storage_view.data)); |
910 | | |
911 | | SizeType<A> new_requested_capacity = |
912 | | ComputeCapacity(storage_view.capacity, requested_capacity); |
913 | | Pointer<A> new_data = allocation_tx.Allocate(new_requested_capacity); |
914 | | |
915 | | ConstructElements<A>(GetAllocator(), new_data, move_values, |
916 | | storage_view.size); |
917 | | |
918 | | DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data, |
919 | | storage_view.size); |
920 | | |
921 | | DeallocateIfAllocated(); |
922 | | SetAllocation(std::move(allocation_tx).Release()); |
923 | | SetIsAllocated(); |
924 | | } |
925 | | |
926 | | template <typename T, size_t N, typename A> |
927 | | auto Storage<T, N, A>::ShrinkToFit() -> void { |
928 | | // May only be called on allocated instances! |
929 | | ABSL_HARDENING_ASSERT(GetIsAllocated()); |
930 | | |
931 | | StorageView<A> storage_view{GetAllocatedData(), GetSize(), |
932 | | GetAllocatedCapacity()}; |
933 | | |
934 | | if (ABSL_PREDICT_FALSE(storage_view.size == storage_view.capacity)) return; |
935 | | |
936 | | AllocationTransaction<A> allocation_tx(GetAllocator()); |
937 | | |
938 | | IteratorValueAdapter<A, MoveIterator<A>> move_values( |
939 | | MoveIterator<A>(storage_view.data)); |
940 | | |
941 | | Pointer<A> construct_data; |
942 | | if (storage_view.size > GetInlinedCapacity()) { |
943 | | SizeType<A> requested_capacity = storage_view.size; |
944 | | construct_data = allocation_tx.Allocate(requested_capacity); |
945 | | if (allocation_tx.GetCapacity() >= storage_view.capacity) { |
946 | | // Already using the smallest available heap allocation. |
947 | | return; |
948 | | } |
949 | | } else { |
950 | | construct_data = GetInlinedData(); |
951 | | } |
952 | | |
953 | | ABSL_INTERNAL_TRY { |
954 | | ConstructElements<A>(GetAllocator(), construct_data, move_values, |
955 | | storage_view.size); |
956 | | } |
957 | | ABSL_INTERNAL_CATCH_ANY { |
958 | | SetAllocation({storage_view.data, storage_view.capacity}); |
959 | | ABSL_INTERNAL_RETHROW; |
960 | | } |
961 | | |
962 | | DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data, |
963 | | storage_view.size); |
964 | | |
965 | | MallocAdapter<A>::Deallocate(GetAllocator(), storage_view.data, |
966 | | storage_view.capacity); |
967 | | |
968 | | if (allocation_tx.DidAllocate()) { |
969 | | SetAllocation(std::move(allocation_tx).Release()); |
970 | | } else { |
971 | | UnsetIsAllocated(); |
972 | | } |
973 | | } |
974 | | |
975 | | template <typename T, size_t N, typename A> |
976 | | auto Storage<T, N, A>::Swap(Storage* other_storage_ptr) -> void { |
977 | | using std::swap; |
978 | | ABSL_HARDENING_ASSERT(this != other_storage_ptr); |
979 | | |
980 | | if (GetIsAllocated() && other_storage_ptr->GetIsAllocated()) { |
981 | | swap(data_.allocated, other_storage_ptr->data_.allocated); |
982 | | } else if (!GetIsAllocated() && !other_storage_ptr->GetIsAllocated()) { |
983 | | SwapInlinedElements(SwapInlinedElementsPolicy{}, other_storage_ptr); |
984 | | } else { |
985 | | Storage* allocated_ptr = this; |
986 | | Storage* inlined_ptr = other_storage_ptr; |
987 | | if (!allocated_ptr->GetIsAllocated()) swap(allocated_ptr, inlined_ptr); |
988 | | |
989 | | StorageView<A> allocated_storage_view{ |
990 | | allocated_ptr->GetAllocatedData(), allocated_ptr->GetSize(), |
991 | | allocated_ptr->GetAllocatedCapacity()}; |
992 | | |
993 | | IteratorValueAdapter<A, MoveIterator<A>> move_values( |
994 | | MoveIterator<A>(inlined_ptr->GetInlinedData())); |
995 | | |
996 | | ABSL_INTERNAL_TRY { |
997 | | ConstructElements<A>(inlined_ptr->GetAllocator(), |
998 | | allocated_ptr->GetInlinedData(), move_values, |
999 | | inlined_ptr->GetSize()); |
1000 | | } |
1001 | | ABSL_INTERNAL_CATCH_ANY { |
1002 | | allocated_ptr->SetAllocation(Allocation<A>{ |
1003 | | allocated_storage_view.data, allocated_storage_view.capacity}); |
1004 | | ABSL_INTERNAL_RETHROW; |
1005 | | } |
1006 | | |
1007 | | DestroyAdapter<A>::DestroyElements(inlined_ptr->GetAllocator(), |
1008 | | inlined_ptr->GetInlinedData(), |
1009 | | inlined_ptr->GetSize()); |
1010 | | |
1011 | | inlined_ptr->SetAllocation(Allocation<A>{allocated_storage_view.data, |
1012 | | allocated_storage_view.capacity}); |
1013 | | } |
1014 | | |
1015 | | swap(GetSizeAndIsAllocated(), other_storage_ptr->GetSizeAndIsAllocated()); |
1016 | | swap(GetAllocator(), other_storage_ptr->GetAllocator()); |
1017 | | } |
1018 | | |
1019 | | template <typename T, size_t N, typename A> |
1020 | | void Storage<T, N, A>::SwapN(ElementwiseSwapPolicy, Storage* other, |
1021 | | SizeType<A> n) { |
1022 | | std::swap_ranges(GetInlinedData(), GetInlinedData() + n, |
1023 | | other->GetInlinedData()); |
1024 | | } |
1025 | | |
1026 | | template <typename T, size_t N, typename A> |
1027 | | void Storage<T, N, A>::SwapN(ElementwiseConstructPolicy, Storage* other, |
1028 | | SizeType<A> n) { |
1029 | | Pointer<A> a = GetInlinedData(); |
1030 | | Pointer<A> b = other->GetInlinedData(); |
1031 | | // see note on allocators in `SwapInlinedElements`. |
1032 | | A& allocator_a = GetAllocator(); |
1033 | | A& allocator_b = other->GetAllocator(); |
1034 | | for (SizeType<A> i = 0; i < n; ++i, ++a, ++b) { |
1035 | | ValueType<A> tmp(std::move(*a)); |
1036 | | |
1037 | | AllocatorTraits<A>::destroy(allocator_a, a); |
1038 | | AllocatorTraits<A>::construct(allocator_b, a, std::move(*b)); |
1039 | | |
1040 | | AllocatorTraits<A>::destroy(allocator_b, b); |
1041 | | AllocatorTraits<A>::construct(allocator_a, b, std::move(tmp)); |
1042 | | } |
1043 | | } |
1044 | | |
1045 | | template <typename T, size_t N, typename A> |
1046 | | void Storage<T, N, A>::SwapInlinedElements(MemcpyPolicy, Storage* other) { |
1047 | | Data tmp = data_; |
1048 | | data_ = other->data_; |
1049 | | other->data_ = tmp; |
1050 | | } |
1051 | | |
1052 | | template <typename T, size_t N, typename A> |
1053 | | template <typename NotMemcpyPolicy> |
1054 | | void Storage<T, N, A>::SwapInlinedElements(NotMemcpyPolicy policy, |
1055 | | Storage* other) { |
1056 | | // Note: `destroy` needs to use the pre-swap allocator, while `construct` |
1057 | | // needs the post-swap allocator. The allocators themselves are swapped |
1058 | | // later, outside of `SwapInlinedElements`. |
1059 | | Storage* small_ptr = this; |
1060 | | Storage* large_ptr = other; |
1061 | | if (small_ptr->GetSize() > large_ptr->GetSize()) { |
1062 | | std::swap(small_ptr, large_ptr); |
1063 | | } |
1064 | | |
1065 | | auto small_size = small_ptr->GetSize(); |
1066 | | auto diff = large_ptr->GetSize() - small_size; |
1067 | | SwapN(policy, other, small_size); |
1068 | | |
1069 | | IteratorValueAdapter<A, MoveIterator<A>> move_values( |
1070 | | MoveIterator<A>(large_ptr->GetInlinedData() + small_size)); |
1071 | | |
1072 | | ConstructElements<A>(large_ptr->GetAllocator(), |
1073 | | small_ptr->GetInlinedData() + small_size, move_values, |
1074 | | diff); |
1075 | | |
1076 | | DestroyAdapter<A>::DestroyElements(large_ptr->GetAllocator(), |
1077 | | large_ptr->GetInlinedData() + small_size, |
1078 | | diff); |
1079 | | } |
1080 | | |
1081 | | // End ignore "array-bounds" |
1082 | | #if !defined(__clang__) && defined(__GNUC__) |
1083 | | #pragma GCC diagnostic pop |
1084 | | #endif |
1085 | | |
1086 | | } // namespace inlined_vector_internal |
1087 | | ABSL_NAMESPACE_END |
1088 | | } // namespace absl |
1089 | | |
1090 | | #endif // ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_H_ |