/src/abseil-cpp/absl/container/internal/inlined_vector.h
Line | Count | Source |
1 | | // Copyright 2019 The Abseil Authors. |
2 | | // |
3 | | // Licensed under the Apache License, Version 2.0 (the "License"); |
4 | | // you may not use this file except in compliance with the License. |
5 | | // You may obtain a copy of the License at |
6 | | // |
7 | | // https://www.apache.org/licenses/LICENSE-2.0 |
8 | | // |
9 | | // Unless required by applicable law or agreed to in writing, software |
10 | | // distributed under the License is distributed on an "AS IS" BASIS, |
11 | | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
12 | | // See the License for the specific language governing permissions and |
13 | | // limitations under the License. |
14 | | |
15 | | #ifndef ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_H_ |
16 | | #define ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_H_ |
17 | | |
18 | | #include <algorithm> |
19 | | #include <cstddef> |
20 | | #include <cstring> |
21 | | #include <iterator> |
22 | | #include <limits> |
23 | | #include <memory> |
24 | | #include <new> |
25 | | #include <type_traits> |
26 | | #include <utility> |
27 | | |
28 | | #include "absl/base/attributes.h" |
29 | | #include "absl/base/config.h" |
30 | | #include "absl/base/macros.h" |
31 | | #include "absl/container/internal/compressed_tuple.h" |
32 | | #include "absl/memory/memory.h" |
33 | | #include "absl/meta/type_traits.h" |
34 | | #include "absl/types/span.h" |
35 | | |
36 | | namespace absl { |
37 | | ABSL_NAMESPACE_BEGIN |
38 | | namespace inlined_vector_internal { |
39 | | |
40 | | // GCC does not deal very well with the below code |
41 | | #if !defined(__clang__) && defined(__GNUC__) |
42 | | #pragma GCC diagnostic push |
43 | | #pragma GCC diagnostic ignored "-Warray-bounds" |
44 | | #endif |
45 | | |
46 | | template <typename A> |
47 | | using AllocatorTraits = std::allocator_traits<A>; |
48 | | template <typename A> |
49 | | using ValueType = typename AllocatorTraits<A>::value_type; |
50 | | template <typename A> |
51 | | using SizeType = typename AllocatorTraits<A>::size_type; |
52 | | template <typename A> |
53 | | using Pointer = typename AllocatorTraits<A>::pointer; |
54 | | template <typename A> |
55 | | using ConstPointer = typename AllocatorTraits<A>::const_pointer; |
56 | | template <typename A> |
57 | | using SizeType = typename AllocatorTraits<A>::size_type; |
58 | | template <typename A> |
59 | | using DifferenceType = typename AllocatorTraits<A>::difference_type; |
60 | | template <typename A> |
61 | | using Reference = ValueType<A>&; |
62 | | template <typename A> |
63 | | using ConstReference = const ValueType<A>&; |
64 | | template <typename A> |
65 | | using Iterator = Pointer<A>; |
66 | | template <typename A> |
67 | | using ConstIterator = ConstPointer<A>; |
68 | | template <typename A> |
69 | | using ReverseIterator = typename std::reverse_iterator<Iterator<A>>; |
70 | | template <typename A> |
71 | | using ConstReverseIterator = typename std::reverse_iterator<ConstIterator<A>>; |
72 | | template <typename A> |
73 | | using MoveIterator = typename std::move_iterator<Iterator<A>>; |
74 | | |
75 | | template <typename A> |
76 | | using IsMoveAssignOk = std::is_move_assignable<ValueType<A>>; |
77 | | template <typename A> |
78 | | using IsSwapOk = absl::type_traits_internal::IsSwappable<ValueType<A>>; |
79 | | |
80 | | template <typename A, |
81 | | bool IsTriviallyDestructible = |
82 | | absl::is_trivially_destructible<ValueType<A>>::value && |
83 | | std::is_same<A, std::allocator<ValueType<A>>>::value> |
84 | | struct DestroyAdapter; |
85 | | |
86 | | template <typename A> |
87 | | struct DestroyAdapter<A, /* IsTriviallyDestructible */ false> { |
88 | | static void DestroyElements(A& allocator, Pointer<A> destroy_first, |
89 | | SizeType<A> destroy_size) { |
90 | | for (SizeType<A> i = destroy_size; i != 0;) { |
91 | | --i; |
92 | | AllocatorTraits<A>::destroy(allocator, destroy_first + i); |
93 | | } |
94 | | } |
95 | | }; |
96 | | |
97 | | template <typename A> |
98 | | struct DestroyAdapter<A, /* IsTriviallyDestructible */ true> { |
99 | | static void DestroyElements(A& allocator, Pointer<A> destroy_first, |
100 | 0 | SizeType<A> destroy_size) { |
101 | 0 | static_cast<void>(allocator); |
102 | 0 | static_cast<void>(destroy_first); |
103 | 0 | static_cast<void>(destroy_size); |
104 | 0 | } Unexecuted instantiation: absl::inlined_vector_internal::DestroyAdapter<std::__1::allocator<absl::str_format_internal::FormatArgImpl>, true>::DestroyElements(std::__1::allocator<absl::str_format_internal::FormatArgImpl>&, absl::str_format_internal::FormatArgImpl*, unsigned long) Unexecuted instantiation: absl::inlined_vector_internal::DestroyAdapter<std::__1::allocator<absl::LogSink*>, true>::DestroyElements(std::__1::allocator<absl::LogSink*>&, absl::LogSink**, unsigned long) |
105 | | }; |
106 | | |
107 | | template <typename A> |
108 | | struct Allocation { |
109 | | Pointer<A> data = nullptr; |
110 | | SizeType<A> capacity = 0; |
111 | | }; |
112 | | |
113 | | template <typename A, |
114 | | bool IsOverAligned = |
115 | | (alignof(ValueType<A>) > ABSL_INTERNAL_DEFAULT_NEW_ALIGNMENT)> |
116 | | struct MallocAdapter { |
117 | 0 | static Allocation<A> Allocate(A& allocator, SizeType<A> requested_capacity) { |
118 | 0 | return {AllocatorTraits<A>::allocate(allocator, requested_capacity), |
119 | 0 | requested_capacity}; |
120 | 0 | } Unexecuted instantiation: absl::inlined_vector_internal::MallocAdapter<std::__1::allocator<absl::str_format_internal::FormatArgImpl>, false>::Allocate(std::__1::allocator<absl::str_format_internal::FormatArgImpl>&, unsigned long) Unexecuted instantiation: absl::inlined_vector_internal::MallocAdapter<std::__1::allocator<absl::LogSink*>, false>::Allocate(std::__1::allocator<absl::LogSink*>&, unsigned long) |
121 | | |
122 | | static void Deallocate(A& allocator, Pointer<A> pointer, |
123 | 0 | SizeType<A> capacity) { |
124 | 0 | AllocatorTraits<A>::deallocate(allocator, pointer, capacity); |
125 | 0 | } Unexecuted instantiation: absl::inlined_vector_internal::MallocAdapter<std::__1::allocator<absl::str_format_internal::FormatArgImpl>, false>::Deallocate(std::__1::allocator<absl::str_format_internal::FormatArgImpl>&, absl::str_format_internal::FormatArgImpl*, unsigned long) Unexecuted instantiation: absl::inlined_vector_internal::MallocAdapter<std::__1::allocator<absl::LogSink*>, false>::Deallocate(std::__1::allocator<absl::LogSink*>&, absl::LogSink**, unsigned long) |
126 | | }; |
127 | | |
128 | | template <typename A, typename ValueAdapter> |
129 | | void ConstructElements(absl::type_identity_t<A>& allocator, |
130 | | Pointer<A> construct_first, ValueAdapter& values, |
131 | 0 | SizeType<A> construct_size) { |
132 | 0 | for (SizeType<A> i = 0; i < construct_size; ++i) { |
133 | 0 | ABSL_INTERNAL_TRY { values.ConstructNext(allocator, construct_first + i); } |
134 | 0 | ABSL_INTERNAL_CATCH_ANY { |
135 | 0 | DestroyAdapter<A>::DestroyElements(allocator, construct_first, i); |
136 | 0 | ABSL_INTERNAL_RETHROW; |
137 | 0 | } |
138 | 0 | } |
139 | 0 | } Unexecuted instantiation: void absl::inlined_vector_internal::ConstructElements<std::__1::allocator<absl::str_format_internal::FormatArgImpl>, absl::inlined_vector_internal::IteratorValueAdapter<std::__1::allocator<absl::str_format_internal::FormatArgImpl>, absl::str_format_internal::FormatArgImpl const*> >(absl::type_identity<std::__1::allocator<absl::str_format_internal::FormatArgImpl> >::type&, std::__1::allocator_traits<std::__1::allocator<absl::str_format_internal::FormatArgImpl> >::pointer, absl::inlined_vector_internal::IteratorValueAdapter<std::__1::allocator<absl::str_format_internal::FormatArgImpl>, absl::str_format_internal::FormatArgImpl const*>&, std::__1::allocator_traits<std::__1::allocator<absl::str_format_internal::FormatArgImpl> >::size_type) Unexecuted instantiation: void absl::inlined_vector_internal::ConstructElements<std::__1::allocator<absl::LogSink*>, absl::inlined_vector_internal::IteratorValueAdapter<std::__1::allocator<absl::LogSink*>, std::__1::move_iterator<absl::LogSink**> > >(absl::type_identity<std::__1::allocator<absl::LogSink*> >::type&, std::__1::allocator_traits<std::__1::allocator<absl::LogSink*> >::pointer, absl::inlined_vector_internal::IteratorValueAdapter<std::__1::allocator<absl::LogSink*>, std::__1::move_iterator<absl::LogSink**> >&, std::__1::allocator_traits<std::__1::allocator<absl::LogSink*> >::size_type) |
140 | | |
141 | | template <typename A, typename ValueAdapter> |
142 | | void AssignElements(Pointer<A> assign_first, ValueAdapter& values, |
143 | | SizeType<A> assign_size) { |
144 | | for (SizeType<A> i = 0; i < assign_size; ++i) { |
145 | | values.AssignNext(assign_first + i); |
146 | | } |
147 | | } |
148 | | |
149 | | template <typename A> |
150 | | struct StorageView { |
151 | | Pointer<A> data; |
152 | | SizeType<A> size; |
153 | | SizeType<A> capacity; |
154 | | }; |
155 | | |
156 | | template <typename A, typename Iterator> |
157 | | class IteratorValueAdapter { |
158 | | public: |
159 | 0 | explicit IteratorValueAdapter(const Iterator& it) : it_(it) {} |
160 | | |
161 | 0 | void ConstructNext(A& allocator, Pointer<A> construct_at) { |
162 | 0 | AllocatorTraits<A>::construct(allocator, construct_at, *it_); |
163 | 0 | ++it_; |
164 | 0 | } Unexecuted instantiation: absl::inlined_vector_internal::IteratorValueAdapter<std::__1::allocator<absl::str_format_internal::FormatArgImpl>, absl::str_format_internal::FormatArgImpl const*>::ConstructNext(std::__1::allocator<absl::str_format_internal::FormatArgImpl>&, absl::str_format_internal::FormatArgImpl*) Unexecuted instantiation: absl::inlined_vector_internal::IteratorValueAdapter<std::__1::allocator<absl::LogSink*>, std::__1::move_iterator<absl::LogSink**> >::ConstructNext(std::__1::allocator<absl::LogSink*>&, absl::LogSink**) |
165 | | |
166 | | void AssignNext(Pointer<A> assign_at) { |
167 | | *assign_at = *it_; |
168 | | ++it_; |
169 | | } |
170 | | |
171 | | private: |
172 | | Iterator it_; |
173 | | }; |
174 | | |
175 | | template <typename A> |
176 | | class CopyValueAdapter { |
177 | | public: |
178 | | explicit CopyValueAdapter(ConstPointer<A> p) : ptr_(p) {} |
179 | | |
180 | | void ConstructNext(A& allocator, Pointer<A> construct_at) { |
181 | | AllocatorTraits<A>::construct(allocator, construct_at, *ptr_); |
182 | | } |
183 | | |
184 | | void AssignNext(Pointer<A> assign_at) { *assign_at = *ptr_; } |
185 | | |
186 | | private: |
187 | | ConstPointer<A> ptr_; |
188 | | }; |
189 | | |
190 | | template <typename A> |
191 | | class DefaultValueAdapter { |
192 | | public: |
193 | | explicit DefaultValueAdapter() {} |
194 | | |
195 | | void ConstructNext(A& allocator, Pointer<A> construct_at) { |
196 | | AllocatorTraits<A>::construct(allocator, construct_at); |
197 | | } |
198 | | |
199 | | void AssignNext(Pointer<A> assign_at) { *assign_at = ValueType<A>(); } |
200 | | }; |
201 | | |
202 | | template <typename A> |
203 | | class AllocationTransaction { |
204 | | public: |
205 | | explicit AllocationTransaction(A& allocator) |
206 | 0 | : allocator_data_(allocator, nullptr), capacity_(0) {} |
207 | | |
208 | 0 | ~AllocationTransaction() { |
209 | 0 | if (DidAllocate()) { |
210 | 0 | MallocAdapter<A>::Deallocate(GetAllocator(), GetData(), GetCapacity()); |
211 | 0 | } |
212 | 0 | } |
213 | | |
214 | | AllocationTransaction(const AllocationTransaction&) = delete; |
215 | | void operator=(const AllocationTransaction&) = delete; |
216 | | |
217 | 0 | A& GetAllocator() { return allocator_data_.template get<0>(); } |
218 | 0 | Pointer<A>& GetData() { return allocator_data_.template get<1>(); } |
219 | 0 | SizeType<A>& GetCapacity() { return capacity_; } |
220 | | |
221 | 0 | bool DidAllocate() { return GetData() != nullptr; } |
222 | | |
223 | 0 | Pointer<A> Allocate(SizeType<A> requested_capacity) { |
224 | 0 | Allocation<A> result = |
225 | 0 | MallocAdapter<A>::Allocate(GetAllocator(), requested_capacity); |
226 | 0 | GetData() = result.data; |
227 | 0 | GetCapacity() = result.capacity; |
228 | 0 | return result.data; |
229 | 0 | } |
230 | | |
231 | 0 | [[nodiscard]] Allocation<A> Release() && { |
232 | 0 | Allocation<A> result = {GetData(), GetCapacity()}; |
233 | 0 | Reset(); |
234 | 0 | return result; |
235 | 0 | } |
236 | | |
237 | | private: |
238 | 0 | void Reset() { |
239 | 0 | GetData() = nullptr; |
240 | 0 | GetCapacity() = 0; |
241 | 0 | } |
242 | | |
243 | | container_internal::CompressedTuple<A, Pointer<A>> allocator_data_; |
244 | | SizeType<A> capacity_; |
245 | | }; |
246 | | |
247 | | template <typename A> |
248 | | class ConstructionTransaction { |
249 | | public: |
250 | | explicit ConstructionTransaction(A& allocator) |
251 | | : allocator_data_(allocator, nullptr), size_(0) {} |
252 | | |
253 | | ~ConstructionTransaction() { |
254 | | if (DidConstruct()) { |
255 | | DestroyAdapter<A>::DestroyElements(GetAllocator(), GetData(), GetSize()); |
256 | | } |
257 | | } |
258 | | |
259 | | ConstructionTransaction(const ConstructionTransaction&) = delete; |
260 | | void operator=(const ConstructionTransaction&) = delete; |
261 | | |
262 | | A& GetAllocator() { return allocator_data_.template get<0>(); } |
263 | | Pointer<A>& GetData() { return allocator_data_.template get<1>(); } |
264 | | SizeType<A>& GetSize() { return size_; } |
265 | | |
266 | | bool DidConstruct() { return GetData() != nullptr; } |
267 | | template <typename ValueAdapter> |
268 | | void Construct(Pointer<A> data, ValueAdapter& values, SizeType<A> size) { |
269 | | ConstructElements<A>(GetAllocator(), data, values, size); |
270 | | GetData() = data; |
271 | | GetSize() = size; |
272 | | } |
273 | | void Commit() && { |
274 | | GetData() = nullptr; |
275 | | GetSize() = 0; |
276 | | } |
277 | | |
278 | | private: |
279 | | container_internal::CompressedTuple<A, Pointer<A>> allocator_data_; |
280 | | SizeType<A> size_; |
281 | | }; |
282 | | |
283 | | template <typename T, size_t N, typename A> |
284 | | class Storage { |
285 | | public: |
286 | | struct MemcpyPolicy {}; |
287 | | struct ElementwiseAssignPolicy {}; |
288 | | struct ElementwiseSwapPolicy {}; |
289 | | struct ElementwiseConstructPolicy {}; |
290 | | |
291 | | using MoveAssignmentPolicy = absl::conditional_t< |
292 | | // Fast path: if the value type can be trivially move assigned and |
293 | | // destroyed, and we know the allocator doesn't do anything fancy, then |
294 | | // it's safe for us to simply adopt the contents of the storage for |
295 | | // `other` and remove its own reference to them. It's as if we had |
296 | | // individually move-assigned each value and then destroyed the original. |
297 | | absl::conjunction<absl::is_trivially_move_assignable<ValueType<A>>, |
298 | | absl::is_trivially_destructible<ValueType<A>>, |
299 | | std::is_same<A, std::allocator<ValueType<A>>>>::value, |
300 | | MemcpyPolicy, |
301 | | // Otherwise we use move assignment if possible. If not, we simulate |
302 | | // move assignment using move construction. |
303 | | // |
304 | | // Note that this is in contrast to e.g. std::vector and std::optional, |
305 | | // which are themselves not move-assignable when their contained type is |
306 | | // not. |
307 | | absl::conditional_t<IsMoveAssignOk<A>::value, ElementwiseAssignPolicy, |
308 | | ElementwiseConstructPolicy>>; |
309 | | |
310 | | // The policy to be used specifically when swapping inlined elements. |
311 | | using SwapInlinedElementsPolicy = absl::conditional_t< |
312 | | // Fast path: if the value type can be trivially relocated, and we |
313 | | // know the allocator doesn't do anything fancy, then it's safe for us |
314 | | // to simply swap the bytes in the inline storage. It's as if we had |
315 | | // relocated the first vector's elements into temporary storage, |
316 | | // relocated the second's elements into the (now-empty) first's, |
317 | | // and then relocated from temporary storage into the second. |
318 | | absl::conjunction<absl::is_trivially_relocatable<ValueType<A>>, |
319 | | std::is_same<A, std::allocator<ValueType<A>>>>::value, |
320 | | MemcpyPolicy, |
321 | | absl::conditional_t<IsSwapOk<A>::value, ElementwiseSwapPolicy, |
322 | | ElementwiseConstructPolicy>>; |
323 | | |
324 | 0 | static SizeType<A> NextCapacity(SizeType<A> current_capacity) { |
325 | 0 | return current_capacity * 2; |
326 | 0 | } Unexecuted instantiation: absl::inlined_vector_internal::Storage<absl::str_format_internal::FormatArgImpl, 4ul, std::__1::allocator<absl::str_format_internal::FormatArgImpl> >::NextCapacity(unsigned long) Unexecuted instantiation: absl::inlined_vector_internal::Storage<absl::LogSink*, 16ul, std::__1::allocator<absl::LogSink*> >::NextCapacity(unsigned long) |
327 | | |
328 | | static SizeType<A> ComputeCapacity(SizeType<A> current_capacity, |
329 | 0 | SizeType<A> requested_capacity) { |
330 | 0 | return (std::max)(NextCapacity(current_capacity), requested_capacity); |
331 | 0 | } |
332 | | |
333 | | // --------------------------------------------------------------------------- |
334 | | // Storage Constructors and Destructor |
335 | | // --------------------------------------------------------------------------- |
336 | | |
337 | 3.09M | Storage() : metadata_(A(), /* size and is_allocated */ 0u) {} |
338 | | |
339 | | explicit Storage(const A& allocator) |
340 | 0 | : metadata_(allocator, /* size and is_allocated */ 0u) {} |
341 | | |
342 | 3.09M | ~Storage() { |
343 | | // Fast path: if we are empty and not allocated, there's nothing to do. |
344 | 3.09M | if (GetSizeAndIsAllocated() == 0) { |
345 | 3.09M | return; |
346 | 3.09M | } |
347 | | |
348 | | // Fast path: if no destructors need to be run and we know the allocator |
349 | | // doesn't do anything fancy, then all we need to do is deallocate (and |
350 | | // maybe not even that). |
351 | 0 | if (absl::is_trivially_destructible<ValueType<A>>::value && |
352 | 0 | std::is_same<A, std::allocator<ValueType<A>>>::value) { |
353 | 0 | DeallocateIfAllocated(); |
354 | 0 | return; |
355 | 0 | } |
356 | | |
357 | 0 | DestroyContents(); |
358 | 0 | } absl::inlined_vector_internal::Storage<absl::LogSink*, 16ul, std::__1::allocator<absl::LogSink*> >::~Storage() Line | Count | Source | 342 | 3.09M | ~Storage() { | 343 | | // Fast path: if we are empty and not allocated, there's nothing to do. | 344 | 3.09M | if (GetSizeAndIsAllocated() == 0) { | 345 | 3.09M | return; | 346 | 3.09M | } | 347 | | | 348 | | // Fast path: if no destructors need to be run and we know the allocator | 349 | | // doesn't do anything fancy, then all we need to do is deallocate (and | 350 | | // maybe not even that). | 351 | 0 | if (absl::is_trivially_destructible<ValueType<A>>::value && | 352 | 0 | std::is_same<A, std::allocator<ValueType<A>>>::value) { | 353 | 0 | DeallocateIfAllocated(); | 354 | 0 | return; | 355 | 0 | } | 356 | | | 357 | 0 | DestroyContents(); | 358 | 0 | } |
Unexecuted instantiation: absl::inlined_vector_internal::Storage<absl::str_format_internal::FormatArgImpl, 4ul, std::__1::allocator<absl::str_format_internal::FormatArgImpl> >::~Storage() |
359 | | |
360 | | // --------------------------------------------------------------------------- |
361 | | // Storage Member Accessors |
362 | | // --------------------------------------------------------------------------- |
363 | | |
364 | 3.09M | SizeType<A>& GetSizeAndIsAllocated() { return metadata_.template get<1>(); } Unexecuted instantiation: absl::inlined_vector_internal::Storage<absl::str_format_internal::FormatArgImpl, 4ul, std::__1::allocator<absl::str_format_internal::FormatArgImpl> >::GetSizeAndIsAllocated() absl::inlined_vector_internal::Storage<absl::LogSink*, 16ul, std::__1::allocator<absl::LogSink*> >::GetSizeAndIsAllocated() Line | Count | Source | 364 | 3.09M | SizeType<A>& GetSizeAndIsAllocated() { return metadata_.template get<1>(); } |
|
365 | | |
366 | 18.4M | const SizeType<A>& GetSizeAndIsAllocated() const { |
367 | 18.4M | return metadata_.template get<1>(); |
368 | 18.4M | } absl::inlined_vector_internal::Storage<absl::str_format_internal::FormatArgImpl, 4ul, std::__1::allocator<absl::str_format_internal::FormatArgImpl> >::GetSizeAndIsAllocated() const Line | Count | Source | 366 | 12.2M | const SizeType<A>& GetSizeAndIsAllocated() const { | 367 | 12.2M | return metadata_.template get<1>(); | 368 | 12.2M | } |
absl::inlined_vector_internal::Storage<absl::LogSink*, 16ul, std::__1::allocator<absl::LogSink*> >::GetSizeAndIsAllocated() const Line | Count | Source | 366 | 6.18M | const SizeType<A>& GetSizeAndIsAllocated() const { | 367 | 6.18M | return metadata_.template get<1>(); | 368 | 6.18M | } |
|
369 | | |
370 | 9.20M | SizeType<A> GetSize() const { return GetSizeAndIsAllocated() >> 1; } absl::inlined_vector_internal::Storage<absl::str_format_internal::FormatArgImpl, 4ul, std::__1::allocator<absl::str_format_internal::FormatArgImpl> >::GetSize() const Line | Count | Source | 370 | 6.11M | SizeType<A> GetSize() const { return GetSizeAndIsAllocated() >> 1; } |
absl::inlined_vector_internal::Storage<absl::LogSink*, 16ul, std::__1::allocator<absl::LogSink*> >::GetSize() const Line | Count | Source | 370 | 3.09M | SizeType<A> GetSize() const { return GetSizeAndIsAllocated() >> 1; } |
|
371 | | |
372 | 9.20M | bool GetIsAllocated() const { return GetSizeAndIsAllocated() & 1; } absl::inlined_vector_internal::Storage<absl::str_format_internal::FormatArgImpl, 4ul, std::__1::allocator<absl::str_format_internal::FormatArgImpl> >::GetIsAllocated() const Line | Count | Source | 372 | 6.11M | bool GetIsAllocated() const { return GetSizeAndIsAllocated() & 1; } |
absl::inlined_vector_internal::Storage<absl::LogSink*, 16ul, std::__1::allocator<absl::LogSink*> >::GetIsAllocated() const Line | Count | Source | 372 | 3.09M | bool GetIsAllocated() const { return GetSizeAndIsAllocated() & 1; } |
|
373 | | |
374 | 0 | Pointer<A> GetAllocatedData() { |
375 | | // GCC 12 has a false-positive -Wmaybe-uninitialized warning here. |
376 | | #if ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(12, 0) |
377 | | #pragma GCC diagnostic push |
378 | | #pragma GCC diagnostic ignored "-Wmaybe-uninitialized" |
379 | | #endif |
380 | 0 | return data_.allocated.allocated_data; |
381 | | #if ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(12, 0) |
382 | | #pragma GCC diagnostic pop |
383 | | #endif |
384 | 0 | } Unexecuted instantiation: absl::inlined_vector_internal::Storage<absl::str_format_internal::FormatArgImpl, 4ul, std::__1::allocator<absl::str_format_internal::FormatArgImpl> >::GetAllocatedData() Unexecuted instantiation: absl::inlined_vector_internal::Storage<absl::LogSink*, 16ul, std::__1::allocator<absl::LogSink*> >::GetAllocatedData() |
385 | | |
386 | 0 | ConstPointer<A> GetAllocatedData() const { |
387 | 0 | return data_.allocated.allocated_data; |
388 | 0 | } |
389 | | |
390 | | // ABSL_ATTRIBUTE_NO_SANITIZE_CFI is used because the memory pointed to may be |
391 | | // uninitialized, a common pattern in allocate()+construct() APIs. |
392 | | // https://clang.llvm.org/docs/ControlFlowIntegrity.html#bad-cast-checking |
393 | | // NOTE: When this was written, LLVM documentation did not explicitly |
394 | | // mention that casting `char*` and using `reinterpret_cast` qualifies |
395 | | // as a bad cast. |
396 | 3.09M | ABSL_ATTRIBUTE_NO_SANITIZE_CFI Pointer<A> GetInlinedData() { |
397 | 3.09M | return reinterpret_cast<Pointer<A>>(data_.inlined.inlined_data); |
398 | 3.09M | } Unexecuted instantiation: absl::inlined_vector_internal::Storage<absl::str_format_internal::FormatArgImpl, 4ul, std::__1::allocator<absl::str_format_internal::FormatArgImpl> >::GetInlinedData() absl::inlined_vector_internal::Storage<absl::LogSink*, 16ul, std::__1::allocator<absl::LogSink*> >::GetInlinedData() Line | Count | Source | 396 | 3.09M | ABSL_ATTRIBUTE_NO_SANITIZE_CFI Pointer<A> GetInlinedData() { | 397 | 3.09M | return reinterpret_cast<Pointer<A>>(data_.inlined.inlined_data); | 398 | 3.09M | } |
|
399 | | |
400 | 6.11M | ABSL_ATTRIBUTE_NO_SANITIZE_CFI ConstPointer<A> GetInlinedData() const { |
401 | 6.11M | return reinterpret_cast<ConstPointer<A>>(data_.inlined.inlined_data); |
402 | 6.11M | } |
403 | | |
404 | 0 | SizeType<A> GetAllocatedCapacity() const { |
405 | 0 | return data_.allocated.allocated_capacity; |
406 | 0 | } Unexecuted instantiation: absl::inlined_vector_internal::Storage<absl::str_format_internal::FormatArgImpl, 4ul, std::__1::allocator<absl::str_format_internal::FormatArgImpl> >::GetAllocatedCapacity() const Unexecuted instantiation: absl::inlined_vector_internal::Storage<absl::LogSink*, 16ul, std::__1::allocator<absl::LogSink*> >::GetAllocatedCapacity() const |
407 | | |
408 | 0 | SizeType<A> GetInlinedCapacity() const { |
409 | 0 | return static_cast<SizeType<A>>(kOptimalInlinedSize); |
410 | 0 | } Unexecuted instantiation: absl::inlined_vector_internal::Storage<absl::str_format_internal::FormatArgImpl, 4ul, std::__1::allocator<absl::str_format_internal::FormatArgImpl> >::GetInlinedCapacity() const Unexecuted instantiation: absl::inlined_vector_internal::Storage<absl::LogSink*, 16ul, std::__1::allocator<absl::LogSink*> >::GetInlinedCapacity() const |
411 | | |
412 | 0 | StorageView<A> MakeStorageView() { |
413 | 0 | return GetIsAllocated() ? StorageView<A>{GetAllocatedData(), GetSize(), |
414 | 0 | GetAllocatedCapacity()} |
415 | 0 | : StorageView<A>{GetInlinedData(), GetSize(), |
416 | 0 | GetInlinedCapacity()}; |
417 | 0 | } |
418 | | |
419 | 0 | A& GetAllocator() { return metadata_.template get<0>(); } Unexecuted instantiation: absl::inlined_vector_internal::Storage<absl::str_format_internal::FormatArgImpl, 4ul, std::__1::allocator<absl::str_format_internal::FormatArgImpl> >::GetAllocator() Unexecuted instantiation: absl::inlined_vector_internal::Storage<absl::LogSink*, 16ul, std::__1::allocator<absl::LogSink*> >::GetAllocator() |
420 | | |
421 | | const A& GetAllocator() const { return metadata_.template get<0>(); } |
422 | | |
423 | | // --------------------------------------------------------------------------- |
424 | | // Storage Member Mutators |
425 | | // --------------------------------------------------------------------------- |
426 | | |
427 | | ABSL_ATTRIBUTE_NOINLINE void InitFrom(const Storage& other); |
428 | | |
429 | | template <typename ValueAdapter> |
430 | | void Initialize(ValueAdapter values, SizeType<A> new_size); |
431 | | |
432 | | template <typename ValueAdapter> |
433 | | void Assign(ValueAdapter values, SizeType<A> new_size); |
434 | | |
435 | | template <typename ValueAdapter> |
436 | | void Resize(ValueAdapter values, SizeType<A> new_size); |
437 | | |
438 | | template <typename ValueAdapter> |
439 | | Iterator<A> Insert(ConstIterator<A> pos, ValueAdapter values, |
440 | | SizeType<A> insert_count); |
441 | | |
442 | | template <typename... Args> |
443 | | Reference<A> EmplaceBack(Args&&... args); |
444 | | |
445 | | Iterator<A> Erase(ConstIterator<A> from, ConstIterator<A> to); |
446 | | |
447 | | void Reserve(SizeType<A> requested_capacity); |
448 | | |
449 | | void ShrinkToFit(); |
450 | | |
451 | | void Swap(Storage* other_storage_ptr); |
452 | | |
453 | 0 | void SetIsAllocated() { |
454 | 0 | GetSizeAndIsAllocated() |= static_cast<SizeType<A>>(1); |
455 | 0 | } Unexecuted instantiation: absl::inlined_vector_internal::Storage<absl::str_format_internal::FormatArgImpl, 4ul, std::__1::allocator<absl::str_format_internal::FormatArgImpl> >::SetIsAllocated() Unexecuted instantiation: absl::inlined_vector_internal::Storage<absl::LogSink*, 16ul, std::__1::allocator<absl::LogSink*> >::SetIsAllocated() |
456 | | |
457 | | void UnsetIsAllocated() { |
458 | | GetSizeAndIsAllocated() &= ((std::numeric_limits<SizeType<A>>::max)() - 1); |
459 | | } |
460 | | |
461 | 0 | void SetSize(SizeType<A> size) { |
462 | 0 | GetSizeAndIsAllocated() = |
463 | 0 | (size << 1) | static_cast<SizeType<A>>(GetIsAllocated()); |
464 | 0 | } |
465 | | |
466 | | void SetAllocatedSize(SizeType<A> size) { |
467 | | GetSizeAndIsAllocated() = (size << 1) | static_cast<SizeType<A>>(1); |
468 | | } |
469 | | |
470 | | void SetInlinedSize(SizeType<A> size) { |
471 | | GetSizeAndIsAllocated() = size << static_cast<SizeType<A>>(1); |
472 | | } |
473 | | |
474 | 0 | void AddSize(SizeType<A> count) { |
475 | 0 | GetSizeAndIsAllocated() += count << static_cast<SizeType<A>>(1); |
476 | 0 | } Unexecuted instantiation: absl::inlined_vector_internal::Storage<absl::str_format_internal::FormatArgImpl, 4ul, std::__1::allocator<absl::str_format_internal::FormatArgImpl> >::AddSize(unsigned long) Unexecuted instantiation: absl::inlined_vector_internal::Storage<absl::LogSink*, 16ul, std::__1::allocator<absl::LogSink*> >::AddSize(unsigned long) |
477 | | |
478 | | void SubtractSize(SizeType<A> count) { |
479 | | ABSL_HARDENING_ASSERT(count <= GetSize()); |
480 | | |
481 | | GetSizeAndIsAllocated() -= count << static_cast<SizeType<A>>(1); |
482 | | } |
483 | | |
484 | 0 | void SetAllocation(Allocation<A> allocation) { |
485 | 0 | data_.allocated.allocated_data = allocation.data; |
486 | 0 | data_.allocated.allocated_capacity = allocation.capacity; |
487 | 0 | } Unexecuted instantiation: absl::inlined_vector_internal::Storage<absl::str_format_internal::FormatArgImpl, 4ul, std::__1::allocator<absl::str_format_internal::FormatArgImpl> >::SetAllocation(absl::inlined_vector_internal::Allocation<std::__1::allocator<absl::str_format_internal::FormatArgImpl> >) Unexecuted instantiation: absl::inlined_vector_internal::Storage<absl::LogSink*, 16ul, std::__1::allocator<absl::LogSink*> >::SetAllocation(absl::inlined_vector_internal::Allocation<std::__1::allocator<absl::LogSink*> >) |
488 | | |
489 | | void MemcpyFrom(const Storage& other_storage) { |
490 | | // Assumption check: it doesn't make sense to memcpy inlined elements unless |
491 | | // we know the allocator doesn't do anything fancy, and one of the following |
492 | | // holds: |
493 | | // |
494 | | // * The elements are trivially relocatable. |
495 | | // |
496 | | // * It's possible to trivially assign the elements and then destroy the |
497 | | // source. |
498 | | // |
499 | | // * It's possible to trivially copy construct/assign the elements. |
500 | | // |
501 | | { |
502 | | using V = ValueType<A>; |
503 | | ABSL_HARDENING_ASSERT( |
504 | | other_storage.GetIsAllocated() || |
505 | | (std::is_same<A, std::allocator<V>>::value && |
506 | | ( |
507 | | // First case above |
508 | | absl::is_trivially_relocatable<V>::value || |
509 | | // Second case above |
510 | | (absl::is_trivially_move_assignable<V>::value && |
511 | | absl::is_trivially_destructible<V>::value) || |
512 | | // Third case above |
513 | | (absl::is_trivially_copy_constructible<V>::value || |
514 | | absl::is_trivially_copy_assignable<V>::value)))); |
515 | | } |
516 | | |
517 | | GetSizeAndIsAllocated() = other_storage.GetSizeAndIsAllocated(); |
518 | | data_ = other_storage.data_; |
519 | | } |
520 | | |
521 | 0 | void DeallocateIfAllocated() { |
522 | 0 | if (GetIsAllocated()) { |
523 | 0 | MallocAdapter<A>::Deallocate(GetAllocator(), GetAllocatedData(), |
524 | 0 | GetAllocatedCapacity()); |
525 | 0 | } |
526 | 0 | } Unexecuted instantiation: absl::inlined_vector_internal::Storage<absl::str_format_internal::FormatArgImpl, 4ul, std::__1::allocator<absl::str_format_internal::FormatArgImpl> >::DeallocateIfAllocated() Unexecuted instantiation: absl::inlined_vector_internal::Storage<absl::LogSink*, 16ul, std::__1::allocator<absl::LogSink*> >::DeallocateIfAllocated() |
527 | | |
528 | | private: |
529 | | ABSL_ATTRIBUTE_NOINLINE void DestroyContents(); |
530 | | |
531 | | using Metadata = container_internal::CompressedTuple<A, SizeType<A>>; |
532 | | |
533 | | struct Allocated { |
534 | | Pointer<A> allocated_data; |
535 | | SizeType<A> allocated_capacity; |
536 | | }; |
537 | | |
538 | | // `kOptimalInlinedSize` is an automatically adjusted inlined capacity of the |
539 | | // `InlinedVector`. Sometimes, it is possible to increase the capacity (from |
540 | | // the user requested `N`) without increasing the size of the `InlinedVector`. |
541 | | static constexpr size_t kOptimalInlinedSize = |
542 | | (std::max)(N, sizeof(Allocated) / sizeof(ValueType<A>)); |
543 | | |
544 | | struct Inlined { |
545 | | alignas(ValueType<A>) unsigned char inlined_data[sizeof( |
546 | | ValueType<A>[kOptimalInlinedSize])]; |
547 | | }; |
548 | | |
549 | | union Data { |
550 | | Allocated allocated; |
551 | | Inlined inlined; |
552 | | }; |
553 | | |
554 | | void SwapN(ElementwiseSwapPolicy, Storage* other, SizeType<A> n); |
555 | | void SwapN(ElementwiseConstructPolicy, Storage* other, SizeType<A> n); |
556 | | |
557 | | void SwapInlinedElements(MemcpyPolicy, Storage* other); |
558 | | template <typename NotMemcpyPolicy> |
559 | | void SwapInlinedElements(NotMemcpyPolicy, Storage* other); |
560 | | |
561 | | template <typename... Args> |
562 | | ABSL_ATTRIBUTE_NOINLINE Reference<A> EmplaceBackSlow(Args&&... args); |
563 | | |
564 | | Metadata metadata_; |
565 | | Data data_; |
566 | | }; |
567 | | |
568 | | template <typename T, size_t N, typename A> |
569 | 0 | void Storage<T, N, A>::DestroyContents() { |
570 | 0 | Pointer<A> data = GetIsAllocated() ? GetAllocatedData() : GetInlinedData(); |
571 | 0 | DestroyAdapter<A>::DestroyElements(GetAllocator(), data, GetSize()); |
572 | 0 | DeallocateIfAllocated(); |
573 | 0 | } Unexecuted instantiation: absl::inlined_vector_internal::Storage<absl::str_format_internal::FormatArgImpl, 4ul, std::__1::allocator<absl::str_format_internal::FormatArgImpl> >::DestroyContents() Unexecuted instantiation: absl::inlined_vector_internal::Storage<absl::LogSink*, 16ul, std::__1::allocator<absl::LogSink*> >::DestroyContents() |
574 | | |
575 | | template <typename T, size_t N, typename A> |
576 | | void Storage<T, N, A>::InitFrom(const Storage& other) { |
577 | | const SizeType<A> n = other.GetSize(); |
578 | | ABSL_HARDENING_ASSERT(n > 0); // Empty sources handled handled in caller. |
579 | | ConstPointer<A> src; |
580 | | Pointer<A> dst; |
581 | | if (!other.GetIsAllocated()) { |
582 | | dst = GetInlinedData(); |
583 | | src = other.GetInlinedData(); |
584 | | } else { |
585 | | // Because this is only called from the `InlinedVector` constructors, it's |
586 | | // safe to take on the allocation with size `0`. If `ConstructElements(...)` |
587 | | // throws, deallocation will be automatically handled by `~Storage()`. |
588 | | SizeType<A> requested_capacity = ComputeCapacity(GetInlinedCapacity(), n); |
589 | | Allocation<A> allocation = |
590 | | MallocAdapter<A>::Allocate(GetAllocator(), requested_capacity); |
591 | | SetAllocation(allocation); |
592 | | dst = allocation.data; |
593 | | src = other.GetAllocatedData(); |
594 | | } |
595 | | |
596 | | // Fast path: if the value type is trivially copy constructible and we know |
597 | | // the allocator doesn't do anything fancy, then we know it is legal for us to |
598 | | // simply memcpy the other vector's elements. |
599 | | if (absl::is_trivially_copy_constructible<ValueType<A>>::value && |
600 | | std::is_same<A, std::allocator<ValueType<A>>>::value) { |
601 | | std::memcpy(reinterpret_cast<char*>(dst), |
602 | | reinterpret_cast<const char*>(src), n * sizeof(ValueType<A>)); |
603 | | } else { |
604 | | auto values = IteratorValueAdapter<A, ConstPointer<A>>(src); |
605 | | ConstructElements<A>(GetAllocator(), dst, values, n); |
606 | | } |
607 | | |
608 | | GetSizeAndIsAllocated() = other.GetSizeAndIsAllocated(); |
609 | | } |
610 | | |
611 | | template <typename T, size_t N, typename A> |
612 | | template <typename ValueAdapter> |
613 | | auto Storage<T, N, A>::Initialize(ValueAdapter values, |
614 | 0 | SizeType<A> new_size) -> void { |
615 | | // Only callable from constructors! |
616 | 0 | ABSL_HARDENING_ASSERT(!GetIsAllocated()); |
617 | 0 | ABSL_HARDENING_ASSERT(GetSize() == 0); |
618 | |
|
619 | 0 | Pointer<A> construct_data; |
620 | 0 | if (new_size > GetInlinedCapacity()) { |
621 | | // Because this is only called from the `InlinedVector` constructors, it's |
622 | | // safe to take on the allocation with size `0`. If `ConstructElements(...)` |
623 | | // throws, deallocation will be automatically handled by `~Storage()`. |
624 | 0 | SizeType<A> requested_capacity = |
625 | 0 | ComputeCapacity(GetInlinedCapacity(), new_size); |
626 | 0 | Allocation<A> allocation = |
627 | 0 | MallocAdapter<A>::Allocate(GetAllocator(), requested_capacity); |
628 | 0 | construct_data = allocation.data; |
629 | 0 | SetAllocation(allocation); |
630 | 0 | SetIsAllocated(); |
631 | 0 | } else { |
632 | 0 | construct_data = GetInlinedData(); |
633 | 0 | } |
634 | |
|
635 | 0 | ConstructElements<A>(GetAllocator(), construct_data, values, new_size); |
636 | | |
637 | | // Since the initial size was guaranteed to be `0` and the allocated bit is |
638 | | // already correct for either case, *adding* `new_size` gives us the correct |
639 | | // result faster than setting it directly. |
640 | 0 | AddSize(new_size); |
641 | 0 | } |
642 | | |
643 | | template <typename T, size_t N, typename A> |
644 | | template <typename ValueAdapter> |
645 | | auto Storage<T, N, A>::Assign(ValueAdapter values, |
646 | | SizeType<A> new_size) -> void { |
647 | | StorageView<A> storage_view = MakeStorageView(); |
648 | | |
649 | | AllocationTransaction<A> allocation_tx(GetAllocator()); |
650 | | |
651 | | absl::Span<ValueType<A>> assign_loop; |
652 | | absl::Span<ValueType<A>> construct_loop; |
653 | | absl::Span<ValueType<A>> destroy_loop; |
654 | | |
655 | | if (new_size > storage_view.capacity) { |
656 | | SizeType<A> requested_capacity = |
657 | | ComputeCapacity(storage_view.capacity, new_size); |
658 | | construct_loop = {allocation_tx.Allocate(requested_capacity), new_size}; |
659 | | destroy_loop = {storage_view.data, storage_view.size}; |
660 | | } else if (new_size > storage_view.size) { |
661 | | assign_loop = {storage_view.data, storage_view.size}; |
662 | | construct_loop = {storage_view.data + storage_view.size, |
663 | | new_size - storage_view.size}; |
664 | | } else { |
665 | | assign_loop = {storage_view.data, new_size}; |
666 | | destroy_loop = {storage_view.data + new_size, storage_view.size - new_size}; |
667 | | } |
668 | | |
669 | | AssignElements<A>(assign_loop.data(), values, assign_loop.size()); |
670 | | |
671 | | ConstructElements<A>(GetAllocator(), construct_loop.data(), values, |
672 | | construct_loop.size()); |
673 | | |
674 | | DestroyAdapter<A>::DestroyElements(GetAllocator(), destroy_loop.data(), |
675 | | destroy_loop.size()); |
676 | | |
677 | | if (allocation_tx.DidAllocate()) { |
678 | | DeallocateIfAllocated(); |
679 | | SetAllocation(std::move(allocation_tx).Release()); |
680 | | SetIsAllocated(); |
681 | | } |
682 | | |
683 | | SetSize(new_size); |
684 | | } |
685 | | |
686 | | template <typename T, size_t N, typename A> |
687 | | template <typename ValueAdapter> |
688 | | auto Storage<T, N, A>::Resize(ValueAdapter values, |
689 | | SizeType<A> new_size) -> void { |
690 | | StorageView<A> storage_view = MakeStorageView(); |
691 | | Pointer<A> const base = storage_view.data; |
692 | | const SizeType<A> size = storage_view.size; |
693 | | A& alloc = GetAllocator(); |
694 | | if (new_size <= size) { |
695 | | // Destroy extra old elements. |
696 | | DestroyAdapter<A>::DestroyElements(alloc, base + new_size, size - new_size); |
697 | | } else if (new_size <= storage_view.capacity) { |
698 | | // Construct new elements in place. |
699 | | ConstructElements<A>(alloc, base + size, values, new_size - size); |
700 | | } else { |
701 | | // Steps: |
702 | | // a. Allocate new backing store. |
703 | | // b. Construct new elements in new backing store. |
704 | | // c. Move existing elements from old backing store to new backing store. |
705 | | // d. Destroy all elements in old backing store. |
706 | | // Use transactional wrappers for the first two steps so we can roll |
707 | | // back if necessary due to exceptions. |
708 | | AllocationTransaction<A> allocation_tx(alloc); |
709 | | SizeType<A> requested_capacity = |
710 | | ComputeCapacity(storage_view.capacity, new_size); |
711 | | Pointer<A> new_data = allocation_tx.Allocate(requested_capacity); |
712 | | |
713 | | ConstructionTransaction<A> construction_tx(alloc); |
714 | | construction_tx.Construct(new_data + size, values, new_size - size); |
715 | | |
716 | | IteratorValueAdapter<A, MoveIterator<A>> move_values( |
717 | | (MoveIterator<A>(base))); |
718 | | ConstructElements<A>(alloc, new_data, move_values, size); |
719 | | |
720 | | DestroyAdapter<A>::DestroyElements(alloc, base, size); |
721 | | std::move(construction_tx).Commit(); |
722 | | DeallocateIfAllocated(); |
723 | | SetAllocation(std::move(allocation_tx).Release()); |
724 | | SetIsAllocated(); |
725 | | } |
726 | | SetSize(new_size); |
727 | | } |
728 | | |
729 | | template <typename T, size_t N, typename A> |
730 | | template <typename ValueAdapter> |
731 | | auto Storage<T, N, A>::Insert(ConstIterator<A> pos, ValueAdapter values, |
732 | | SizeType<A> insert_count) -> Iterator<A> { |
733 | | StorageView<A> storage_view = MakeStorageView(); |
734 | | |
735 | | auto insert_index = static_cast<SizeType<A>>( |
736 | | std::distance(ConstIterator<A>(storage_view.data), pos)); |
737 | | SizeType<A> insert_end_index = insert_index + insert_count; |
738 | | SizeType<A> new_size = storage_view.size + insert_count; |
739 | | |
740 | | if (new_size > storage_view.capacity) { |
741 | | AllocationTransaction<A> allocation_tx(GetAllocator()); |
742 | | ConstructionTransaction<A> construction_tx(GetAllocator()); |
743 | | ConstructionTransaction<A> move_construction_tx(GetAllocator()); |
744 | | |
745 | | IteratorValueAdapter<A, MoveIterator<A>> move_values( |
746 | | MoveIterator<A>(storage_view.data)); |
747 | | |
748 | | SizeType<A> requested_capacity = |
749 | | ComputeCapacity(storage_view.capacity, new_size); |
750 | | Pointer<A> new_data = allocation_tx.Allocate(requested_capacity); |
751 | | |
752 | | construction_tx.Construct(new_data + insert_index, values, insert_count); |
753 | | |
754 | | move_construction_tx.Construct(new_data, move_values, insert_index); |
755 | | |
756 | | ConstructElements<A>(GetAllocator(), new_data + insert_end_index, |
757 | | move_values, storage_view.size - insert_index); |
758 | | |
759 | | DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data, |
760 | | storage_view.size); |
761 | | |
762 | | std::move(construction_tx).Commit(); |
763 | | std::move(move_construction_tx).Commit(); |
764 | | DeallocateIfAllocated(); |
765 | | SetAllocation(std::move(allocation_tx).Release()); |
766 | | |
767 | | SetAllocatedSize(new_size); |
768 | | return Iterator<A>(new_data + insert_index); |
769 | | } else { |
770 | | SizeType<A> move_construction_destination_index = |
771 | | (std::max)(insert_end_index, storage_view.size); |
772 | | |
773 | | ConstructionTransaction<A> move_construction_tx(GetAllocator()); |
774 | | |
775 | | IteratorValueAdapter<A, MoveIterator<A>> move_construction_values( |
776 | | MoveIterator<A>(storage_view.data + |
777 | | (move_construction_destination_index - insert_count))); |
778 | | absl::Span<ValueType<A>> move_construction = { |
779 | | storage_view.data + move_construction_destination_index, |
780 | | new_size - move_construction_destination_index}; |
781 | | |
782 | | Pointer<A> move_assignment_values = storage_view.data + insert_index; |
783 | | absl::Span<ValueType<A>> move_assignment = { |
784 | | storage_view.data + insert_end_index, |
785 | | move_construction_destination_index - insert_end_index}; |
786 | | |
787 | | absl::Span<ValueType<A>> insert_assignment = {move_assignment_values, |
788 | | move_construction.size()}; |
789 | | |
790 | | absl::Span<ValueType<A>> insert_construction = { |
791 | | insert_assignment.data() + insert_assignment.size(), |
792 | | insert_count - insert_assignment.size()}; |
793 | | |
794 | | move_construction_tx.Construct(move_construction.data(), |
795 | | move_construction_values, |
796 | | move_construction.size()); |
797 | | |
798 | | for (Pointer<A> |
799 | | destination = move_assignment.data() + move_assignment.size(), |
800 | | last_destination = move_assignment.data(), |
801 | | source = move_assignment_values + move_assignment.size(); |
802 | | ;) { |
803 | | --destination; |
804 | | --source; |
805 | | if (destination < last_destination) break; |
806 | | *destination = std::move(*source); |
807 | | } |
808 | | |
809 | | AssignElements<A>(insert_assignment.data(), values, |
810 | | insert_assignment.size()); |
811 | | |
812 | | ConstructElements<A>(GetAllocator(), insert_construction.data(), values, |
813 | | insert_construction.size()); |
814 | | |
815 | | std::move(move_construction_tx).Commit(); |
816 | | |
817 | | AddSize(insert_count); |
818 | | return Iterator<A>(storage_view.data + insert_index); |
819 | | } |
820 | | } |
821 | | |
822 | | template <typename T, size_t N, typename A> |
823 | | template <typename... Args> |
824 | 0 | auto Storage<T, N, A>::EmplaceBack(Args&&... args) -> Reference<A> { |
825 | 0 | StorageView<A> storage_view = MakeStorageView(); |
826 | 0 | const SizeType<A> n = storage_view.size; |
827 | 0 | if (ABSL_PREDICT_TRUE(n != storage_view.capacity)) { |
828 | | // Fast path; new element fits. |
829 | 0 | Pointer<A> last_ptr = storage_view.data + n; |
830 | 0 | AllocatorTraits<A>::construct(GetAllocator(), last_ptr, |
831 | 0 | std::forward<Args>(args)...); |
832 | 0 | AddSize(1); |
833 | 0 | return *last_ptr; |
834 | 0 | } |
835 | | // TODO(b/173712035): Annotate with musttail attribute to prevent regression. |
836 | 0 | return EmplaceBackSlow(std::forward<Args>(args)...); |
837 | 0 | } |
838 | | |
839 | | template <typename T, size_t N, typename A> |
840 | | template <typename... Args> |
841 | 0 | auto Storage<T, N, A>::EmplaceBackSlow(Args&&... args) -> Reference<A> { |
842 | 0 | StorageView<A> storage_view = MakeStorageView(); |
843 | 0 | AllocationTransaction<A> allocation_tx(GetAllocator()); |
844 | 0 | IteratorValueAdapter<A, MoveIterator<A>> move_values( |
845 | 0 | MoveIterator<A>(storage_view.data)); |
846 | 0 | SizeType<A> requested_capacity = NextCapacity(storage_view.capacity); |
847 | 0 | Pointer<A> construct_data = allocation_tx.Allocate(requested_capacity); |
848 | 0 | Pointer<A> last_ptr = construct_data + storage_view.size; |
849 | | |
850 | | // Construct new element. |
851 | 0 | AllocatorTraits<A>::construct(GetAllocator(), last_ptr, |
852 | 0 | std::forward<Args>(args)...); |
853 | | // Move elements from old backing store to new backing store. |
854 | 0 | ABSL_INTERNAL_TRY { |
855 | 0 | ConstructElements<A>(GetAllocator(), allocation_tx.GetData(), move_values, |
856 | 0 | storage_view.size); |
857 | 0 | } |
858 | 0 | ABSL_INTERNAL_CATCH_ANY { |
859 | 0 | AllocatorTraits<A>::destroy(GetAllocator(), last_ptr); |
860 | 0 | ABSL_INTERNAL_RETHROW; |
861 | 0 | } |
862 | | // Destroy elements in old backing store. |
863 | 0 | DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data, |
864 | 0 | storage_view.size); |
865 | |
|
866 | 0 | DeallocateIfAllocated(); |
867 | 0 | SetAllocation(std::move(allocation_tx).Release()); |
868 | 0 | SetIsAllocated(); |
869 | 0 | AddSize(1); |
870 | 0 | return *last_ptr; |
871 | 0 | } |
872 | | |
873 | | template <typename T, size_t N, typename A> |
874 | | auto Storage<T, N, A>::Erase(ConstIterator<A> from, |
875 | | ConstIterator<A> to) -> Iterator<A> { |
876 | | StorageView<A> storage_view = MakeStorageView(); |
877 | | |
878 | | auto erase_size = static_cast<SizeType<A>>(std::distance(from, to)); |
879 | | auto erase_index = static_cast<SizeType<A>>( |
880 | | std::distance(ConstIterator<A>(storage_view.data), from)); |
881 | | SizeType<A> erase_end_index = erase_index + erase_size; |
882 | | |
883 | | // Fast path: if the value type is trivially relocatable and we know |
884 | | // the allocator doesn't do anything fancy, then we know it is legal for us to |
885 | | // simply destroy the elements in the "erasure window" (which cannot throw) |
886 | | // and then memcpy downward to close the window. |
887 | | if (absl::is_trivially_relocatable<ValueType<A>>::value && |
888 | | std::is_nothrow_destructible<ValueType<A>>::value && |
889 | | std::is_same<A, std::allocator<ValueType<A>>>::value) { |
890 | | DestroyAdapter<A>::DestroyElements( |
891 | | GetAllocator(), storage_view.data + erase_index, erase_size); |
892 | | std::memmove( |
893 | | reinterpret_cast<char*>(storage_view.data + erase_index), |
894 | | reinterpret_cast<const char*>(storage_view.data + erase_end_index), |
895 | | (storage_view.size - erase_end_index) * sizeof(ValueType<A>)); |
896 | | } else { |
897 | | IteratorValueAdapter<A, MoveIterator<A>> move_values( |
898 | | MoveIterator<A>(storage_view.data + erase_end_index)); |
899 | | |
900 | | AssignElements<A>(storage_view.data + erase_index, move_values, |
901 | | storage_view.size - erase_end_index); |
902 | | |
903 | | DestroyAdapter<A>::DestroyElements( |
904 | | GetAllocator(), storage_view.data + (storage_view.size - erase_size), |
905 | | erase_size); |
906 | | } |
907 | | SubtractSize(erase_size); |
908 | | return Iterator<A>(storage_view.data + erase_index); |
909 | | } |
910 | | |
911 | | template <typename T, size_t N, typename A> |
912 | | auto Storage<T, N, A>::Reserve(SizeType<A> requested_capacity) -> void { |
913 | | StorageView<A> storage_view = MakeStorageView(); |
914 | | |
915 | | if (ABSL_PREDICT_FALSE(requested_capacity <= storage_view.capacity)) return; |
916 | | |
917 | | AllocationTransaction<A> allocation_tx(GetAllocator()); |
918 | | |
919 | | IteratorValueAdapter<A, MoveIterator<A>> move_values( |
920 | | MoveIterator<A>(storage_view.data)); |
921 | | |
922 | | SizeType<A> new_requested_capacity = |
923 | | ComputeCapacity(storage_view.capacity, requested_capacity); |
924 | | Pointer<A> new_data = allocation_tx.Allocate(new_requested_capacity); |
925 | | |
926 | | ConstructElements<A>(GetAllocator(), new_data, move_values, |
927 | | storage_view.size); |
928 | | |
929 | | DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data, |
930 | | storage_view.size); |
931 | | |
932 | | DeallocateIfAllocated(); |
933 | | SetAllocation(std::move(allocation_tx).Release()); |
934 | | SetIsAllocated(); |
935 | | } |
936 | | |
937 | | template <typename T, size_t N, typename A> |
938 | | auto Storage<T, N, A>::ShrinkToFit() -> void { |
939 | | // May only be called on allocated instances! |
940 | | ABSL_HARDENING_ASSERT(GetIsAllocated()); |
941 | | |
942 | | StorageView<A> storage_view{GetAllocatedData(), GetSize(), |
943 | | GetAllocatedCapacity()}; |
944 | | |
945 | | if (ABSL_PREDICT_FALSE(storage_view.size == storage_view.capacity)) return; |
946 | | |
947 | | AllocationTransaction<A> allocation_tx(GetAllocator()); |
948 | | |
949 | | IteratorValueAdapter<A, MoveIterator<A>> move_values( |
950 | | MoveIterator<A>(storage_view.data)); |
951 | | |
952 | | Pointer<A> construct_data; |
953 | | if (storage_view.size > GetInlinedCapacity()) { |
954 | | SizeType<A> requested_capacity = storage_view.size; |
955 | | construct_data = allocation_tx.Allocate(requested_capacity); |
956 | | if (allocation_tx.GetCapacity() >= storage_view.capacity) { |
957 | | // Already using the smallest available heap allocation. |
958 | | return; |
959 | | } |
960 | | } else { |
961 | | construct_data = GetInlinedData(); |
962 | | } |
963 | | |
964 | | ABSL_INTERNAL_TRY { |
965 | | ConstructElements<A>(GetAllocator(), construct_data, move_values, |
966 | | storage_view.size); |
967 | | } |
968 | | ABSL_INTERNAL_CATCH_ANY { |
969 | | SetAllocation({storage_view.data, storage_view.capacity}); |
970 | | ABSL_INTERNAL_RETHROW; |
971 | | } |
972 | | |
973 | | DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data, |
974 | | storage_view.size); |
975 | | |
976 | | MallocAdapter<A>::Deallocate(GetAllocator(), storage_view.data, |
977 | | storage_view.capacity); |
978 | | |
979 | | if (allocation_tx.DidAllocate()) { |
980 | | SetAllocation(std::move(allocation_tx).Release()); |
981 | | } else { |
982 | | UnsetIsAllocated(); |
983 | | } |
984 | | } |
985 | | |
986 | | template <typename T, size_t N, typename A> |
987 | | auto Storage<T, N, A>::Swap(Storage* other_storage_ptr) -> void { |
988 | | using std::swap; |
989 | | ABSL_HARDENING_ASSERT(this != other_storage_ptr); |
990 | | |
991 | | if (GetIsAllocated() && other_storage_ptr->GetIsAllocated()) { |
992 | | swap(data_.allocated, other_storage_ptr->data_.allocated); |
993 | | } else if (!GetIsAllocated() && !other_storage_ptr->GetIsAllocated()) { |
994 | | SwapInlinedElements(SwapInlinedElementsPolicy{}, other_storage_ptr); |
995 | | } else { |
996 | | Storage* allocated_ptr = this; |
997 | | Storage* inlined_ptr = other_storage_ptr; |
998 | | if (!allocated_ptr->GetIsAllocated()) swap(allocated_ptr, inlined_ptr); |
999 | | |
1000 | | StorageView<A> allocated_storage_view{ |
1001 | | allocated_ptr->GetAllocatedData(), allocated_ptr->GetSize(), |
1002 | | allocated_ptr->GetAllocatedCapacity()}; |
1003 | | |
1004 | | IteratorValueAdapter<A, MoveIterator<A>> move_values( |
1005 | | MoveIterator<A>(inlined_ptr->GetInlinedData())); |
1006 | | |
1007 | | ABSL_INTERNAL_TRY { |
1008 | | ConstructElements<A>(inlined_ptr->GetAllocator(), |
1009 | | allocated_ptr->GetInlinedData(), move_values, |
1010 | | inlined_ptr->GetSize()); |
1011 | | } |
1012 | | ABSL_INTERNAL_CATCH_ANY { |
1013 | | allocated_ptr->SetAllocation(Allocation<A>{ |
1014 | | allocated_storage_view.data, allocated_storage_view.capacity}); |
1015 | | ABSL_INTERNAL_RETHROW; |
1016 | | } |
1017 | | |
1018 | | DestroyAdapter<A>::DestroyElements(inlined_ptr->GetAllocator(), |
1019 | | inlined_ptr->GetInlinedData(), |
1020 | | inlined_ptr->GetSize()); |
1021 | | |
1022 | | inlined_ptr->SetAllocation(Allocation<A>{allocated_storage_view.data, |
1023 | | allocated_storage_view.capacity}); |
1024 | | } |
1025 | | |
1026 | | swap(GetSizeAndIsAllocated(), other_storage_ptr->GetSizeAndIsAllocated()); |
1027 | | swap(GetAllocator(), other_storage_ptr->GetAllocator()); |
1028 | | } |
1029 | | |
1030 | | template <typename T, size_t N, typename A> |
1031 | | void Storage<T, N, A>::SwapN(ElementwiseSwapPolicy, Storage* other, |
1032 | | SizeType<A> n) { |
1033 | | std::swap_ranges(GetInlinedData(), GetInlinedData() + n, |
1034 | | other->GetInlinedData()); |
1035 | | } |
1036 | | |
1037 | | template <typename T, size_t N, typename A> |
1038 | | void Storage<T, N, A>::SwapN(ElementwiseConstructPolicy, Storage* other, |
1039 | | SizeType<A> n) { |
1040 | | Pointer<A> a = GetInlinedData(); |
1041 | | Pointer<A> b = other->GetInlinedData(); |
1042 | | // see note on allocators in `SwapInlinedElements`. |
1043 | | A& allocator_a = GetAllocator(); |
1044 | | A& allocator_b = other->GetAllocator(); |
1045 | | for (SizeType<A> i = 0; i < n; ++i, ++a, ++b) { |
1046 | | ValueType<A> tmp(std::move(*a)); |
1047 | | |
1048 | | AllocatorTraits<A>::destroy(allocator_a, a); |
1049 | | AllocatorTraits<A>::construct(allocator_b, a, std::move(*b)); |
1050 | | |
1051 | | AllocatorTraits<A>::destroy(allocator_b, b); |
1052 | | AllocatorTraits<A>::construct(allocator_a, b, std::move(tmp)); |
1053 | | } |
1054 | | } |
1055 | | |
1056 | | template <typename T, size_t N, typename A> |
1057 | | void Storage<T, N, A>::SwapInlinedElements(MemcpyPolicy, Storage* other) { |
1058 | | Data tmp = data_; |
1059 | | data_ = other->data_; |
1060 | | other->data_ = tmp; |
1061 | | } |
1062 | | |
1063 | | template <typename T, size_t N, typename A> |
1064 | | template <typename NotMemcpyPolicy> |
1065 | | void Storage<T, N, A>::SwapInlinedElements(NotMemcpyPolicy policy, |
1066 | | Storage* other) { |
1067 | | // Note: `destroy` needs to use pre-swap allocator while `construct` - |
1068 | | // post-swap allocator. Allocators will be swapped later on outside of |
1069 | | // `SwapInlinedElements`. |
1070 | | Storage* small_ptr = this; |
1071 | | Storage* large_ptr = other; |
1072 | | if (small_ptr->GetSize() > large_ptr->GetSize()) { |
1073 | | std::swap(small_ptr, large_ptr); |
1074 | | } |
1075 | | |
1076 | | auto small_size = small_ptr->GetSize(); |
1077 | | auto diff = large_ptr->GetSize() - small_size; |
1078 | | SwapN(policy, other, small_size); |
1079 | | |
1080 | | IteratorValueAdapter<A, MoveIterator<A>> move_values( |
1081 | | MoveIterator<A>(large_ptr->GetInlinedData() + small_size)); |
1082 | | |
1083 | | ConstructElements<A>(large_ptr->GetAllocator(), |
1084 | | small_ptr->GetInlinedData() + small_size, move_values, |
1085 | | diff); |
1086 | | |
1087 | | DestroyAdapter<A>::DestroyElements(large_ptr->GetAllocator(), |
1088 | | large_ptr->GetInlinedData() + small_size, |
1089 | | diff); |
1090 | | } |
1091 | | |
1092 | | // End ignore "array-bounds" |
1093 | | #if !defined(__clang__) && defined(__GNUC__) |
1094 | | #pragma GCC diagnostic pop |
1095 | | #endif |
1096 | | |
1097 | | } // namespace inlined_vector_internal |
1098 | | ABSL_NAMESPACE_END |
1099 | | } // namespace absl |
1100 | | |
1101 | | #endif // ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_H_ |