/src/abseil-cpp/absl/container/internal/inlined_vector.h
Line | Count | Source |
1 | | // Copyright 2019 The Abseil Authors. |
2 | | // |
3 | | // Licensed under the Apache License, Version 2.0 (the "License"); |
4 | | // you may not use this file except in compliance with the License. |
5 | | // You may obtain a copy of the License at |
6 | | // |
7 | | // https://www.apache.org/licenses/LICENSE-2.0 |
8 | | // |
9 | | // Unless required by applicable law or agreed to in writing, software |
10 | | // distributed under the License is distributed on an "AS IS" BASIS, |
11 | | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
12 | | // See the License for the specific language governing permissions and |
13 | | // limitations under the License. |
14 | | |
15 | | #ifndef ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_H_ |
16 | | #define ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_H_ |
17 | | |
18 | | #include <algorithm> |
19 | | #include <cstddef> |
20 | | #include <cstring> |
21 | | #include <iterator> |
22 | | #include <limits> |
23 | | #include <memory> |
24 | | #include <new> |
25 | | #include <type_traits> |
26 | | #include <utility> |
27 | | |
28 | | #include "absl/base/attributes.h" |
29 | | #include "absl/base/config.h" |
30 | | #include "absl/base/internal/identity.h" |
31 | | #include "absl/base/macros.h" |
32 | | #include "absl/container/internal/compressed_tuple.h" |
33 | | #include "absl/memory/memory.h" |
34 | | #include "absl/meta/type_traits.h" |
35 | | #include "absl/types/span.h" |
36 | | |
37 | | namespace absl { |
38 | | ABSL_NAMESPACE_BEGIN |
39 | | namespace inlined_vector_internal { |
40 | | |
41 | | // GCC does not deal very well with the below code |
42 | | #if !defined(__clang__) && defined(__GNUC__) |
43 | | #pragma GCC diagnostic push |
44 | | #pragma GCC diagnostic ignored "-Warray-bounds" |
45 | | #endif |
46 | | |
47 | | template <typename A> |
48 | | using AllocatorTraits = std::allocator_traits<A>; |
49 | | template <typename A> |
50 | | using ValueType = typename AllocatorTraits<A>::value_type; |
51 | | template <typename A> |
52 | | using SizeType = typename AllocatorTraits<A>::size_type; |
53 | | template <typename A> |
54 | | using Pointer = typename AllocatorTraits<A>::pointer; |
55 | | template <typename A> |
56 | | using ConstPointer = typename AllocatorTraits<A>::const_pointer; |
59 | | template <typename A> |
60 | | using DifferenceType = typename AllocatorTraits<A>::difference_type; |
61 | | template <typename A> |
62 | | using Reference = ValueType<A>&; |
63 | | template <typename A> |
64 | | using ConstReference = const ValueType<A>&; |
65 | | template <typename A> |
66 | | using Iterator = Pointer<A>; |
67 | | template <typename A> |
68 | | using ConstIterator = ConstPointer<A>; |
69 | | template <typename A> |
70 | | using ReverseIterator = typename std::reverse_iterator<Iterator<A>>; |
71 | | template <typename A> |
72 | | using ConstReverseIterator = typename std::reverse_iterator<ConstIterator<A>>; |
73 | | template <typename A> |
74 | | using MoveIterator = typename std::move_iterator<Iterator<A>>; |
75 | | |
76 | | template <typename A> |
77 | | using IsMoveAssignOk = std::is_move_assignable<ValueType<A>>; |
78 | | template <typename A> |
79 | | using IsSwapOk = absl::type_traits_internal::IsSwappable<ValueType<A>>; |
80 | | |
81 | | template <typename A, |
82 | | bool IsTriviallyDestructible = |
83 | | absl::is_trivially_destructible<ValueType<A>>::value && |
84 | | std::is_same<A, std::allocator<ValueType<A>>>::value> |
85 | | struct DestroyAdapter; |
86 | | |
87 | | template <typename A> |
88 | | struct DestroyAdapter<A, /* IsTriviallyDestructible */ false> { |
89 | | static void DestroyElements(A& allocator, Pointer<A> destroy_first, |
90 | | SizeType<A> destroy_size) { |
91 | | for (SizeType<A> i = destroy_size; i != 0;) { |
92 | | --i; |
93 | | AllocatorTraits<A>::destroy(allocator, destroy_first + i); |
94 | | } |
95 | | } |
96 | | }; |
97 | | |
98 | | template <typename A> |
99 | | struct DestroyAdapter<A, /* IsTriviallyDestructible */ true> { |
100 | | static void DestroyElements(A& allocator, Pointer<A> destroy_first, |
101 | 0 | SizeType<A> destroy_size) { |
102 | 0 | static_cast<void>(allocator); |
103 | 0 | static_cast<void>(destroy_first); |
104 | 0 | static_cast<void>(destroy_size); |
105 | 0 | }
106 | | }; |
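The two specializations above implement a compile-time dispatch: destruction is skipped only when it provably does nothing. A minimal standalone sketch of the same trait check (the names Val and kSkipsDestruction are illustrative, not Abseil's):

#include <memory>
#include <string>
#include <type_traits>

// Sketch of the DestroyAdapter dispatch: destruction is skipped entirely only
// for trivially destructible elements held with the default std::allocator;
// otherwise each element is destroyed one by one, newest first.
template <typename A>
using Val = typename std::allocator_traits<A>::value_type;

template <typename A>
constexpr bool kSkipsDestruction =
    std::is_trivially_destructible<Val<A>>::value &&
    std::is_same<A, std::allocator<Val<A>>>::value;

static_assert(kSkipsDestruction<std::allocator<int>>,
              "no destructor calls are needed for int");
static_assert(!kSkipsDestruction<std::allocator<std::string>>,
              "std::string elements must be destroyed individually");

int main() { return 0; }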
107 | | |
108 | | template <typename A> |
109 | | struct Allocation { |
110 | | Pointer<A> data = nullptr; |
111 | | SizeType<A> capacity = 0; |
112 | | }; |
113 | | |
114 | | template <typename A, |
115 | | bool IsOverAligned = |
116 | | (alignof(ValueType<A>) > ABSL_INTERNAL_DEFAULT_NEW_ALIGNMENT)> |
117 | | struct MallocAdapter { |
118 | 0 | static Allocation<A> Allocate(A& allocator, SizeType<A> requested_capacity) { |
119 | 0 | return {AllocatorTraits<A>::allocate(allocator, requested_capacity), |
120 | 0 | requested_capacity}; |
121 | 0 | }
122 | | |
123 | | static void Deallocate(A& allocator, Pointer<A> pointer, |
124 | 0 | SizeType<A> capacity) { |
125 | 0 | AllocatorTraits<A>::deallocate(allocator, pointer, capacity); |
126 | 0 | }
127 | | }; |
128 | | |
129 | | template <typename A, typename ValueAdapter> |
130 | | void ConstructElements(absl::internal::type_identity_t<A>& allocator, |
131 | | Pointer<A> construct_first, ValueAdapter& values, |
132 | 0 | SizeType<A> construct_size) { |
133 | 0 | for (SizeType<A> i = 0; i < construct_size; ++i) { |
134 | 0 | ABSL_INTERNAL_TRY { values.ConstructNext(allocator, construct_first + i); } |
135 | 0 | ABSL_INTERNAL_CATCH_ANY { |
136 | 0 | DestroyAdapter<A>::DestroyElements(allocator, construct_first, i); |
137 | 0 | ABSL_INTERNAL_RETHROW; |
138 | 0 | } |
139 | 0 | } |
140 | 0 | }
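ConstructElements gives batch construction a rollback guarantee: if constructing element i throws, the already-built elements [0, i) are destroyed before the exception is rethrown. A self-contained sketch of that pattern, assuming a hypothetical ThrowOnThird test type:

#include <cstddef>
#include <new>
#include <stdexcept>

// Sketch of the rollback in ConstructElements: construct elements one by one
// and, if a constructor throws, destroy the already-built prefix before
// rethrowing. ThrowOnThird is an illustrative test type, not part of Abseil.
struct ThrowOnThird {
  static int constructed;
  ThrowOnThird() {
    if (constructed == 2) throw std::runtime_error("third construction fails");
    ++constructed;
  }
};
int ThrowOnThird::constructed = 0;

template <typename T>
void ConstructN(T* first, std::size_t n) {
  std::size_t i = 0;
  try {
    for (; i < n; ++i) ::new (static_cast<void*>(first + i)) T();
  } catch (...) {
    while (i != 0) first[--i].~T();  // destroy the already-built prefix
    throw;
  }
}

int main() {
  alignas(ThrowOnThird) unsigned char buf[4 * sizeof(ThrowOnThird)];
  try {
    ConstructN(reinterpret_cast<ThrowOnThird*>(buf), 4);
  } catch (const std::runtime_error&) {
    // The two elements built before the throw were already destroyed.
  }
  return 0;
}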
141 | | |
142 | | template <typename A, typename ValueAdapter> |
143 | | void AssignElements(Pointer<A> assign_first, ValueAdapter& values, |
144 | | SizeType<A> assign_size) { |
145 | | for (SizeType<A> i = 0; i < assign_size; ++i) { |
146 | | values.AssignNext(assign_first + i); |
147 | | } |
148 | | } |
149 | | |
150 | | template <typename A> |
151 | | struct StorageView { |
152 | | Pointer<A> data; |
153 | | SizeType<A> size; |
154 | | SizeType<A> capacity; |
155 | | }; |
156 | | |
157 | | template <typename A, typename Iterator> |
158 | | class IteratorValueAdapter { |
159 | | public: |
160 | 0 | explicit IteratorValueAdapter(const Iterator& it) : it_(it) {} |
161 | | |
162 | 0 | void ConstructNext(A& allocator, Pointer<A> construct_at) { |
163 | 0 | AllocatorTraits<A>::construct(allocator, construct_at, *it_); |
164 | 0 | ++it_; |
165 | 0 | }
166 | | |
167 | | void AssignNext(Pointer<A> assign_at) { |
168 | | *assign_at = *it_; |
169 | | ++it_; |
170 | | } |
171 | | |
172 | | private: |
173 | | Iterator it_; |
174 | | }; |
175 | | |
176 | | template <typename A> |
177 | | class CopyValueAdapter { |
178 | | public: |
179 | | explicit CopyValueAdapter(ConstPointer<A> p) : ptr_(p) {} |
180 | | |
181 | | void ConstructNext(A& allocator, Pointer<A> construct_at) { |
182 | | AllocatorTraits<A>::construct(allocator, construct_at, *ptr_); |
183 | | } |
184 | | |
185 | | void AssignNext(Pointer<A> assign_at) { *assign_at = *ptr_; } |
186 | | |
187 | | private: |
188 | | ConstPointer<A> ptr_; |
189 | | }; |
190 | | |
191 | | template <typename A> |
192 | | class DefaultValueAdapter { |
193 | | public: |
194 | | explicit DefaultValueAdapter() {} |
195 | | |
196 | | void ConstructNext(A& allocator, Pointer<A> construct_at) { |
197 | | AllocatorTraits<A>::construct(allocator, construct_at); |
198 | | } |
199 | | |
200 | | void AssignNext(Pointer<A> assign_at) { *assign_at = ValueType<A>(); } |
201 | | }; |
202 | | |
203 | | template <typename A> |
204 | | class AllocationTransaction { |
205 | | public: |
206 | | explicit AllocationTransaction(A& allocator) |
207 | 0 | : allocator_data_(allocator, nullptr), capacity_(0) {} |
208 | | |
209 | 0 | ~AllocationTransaction() { |
210 | 0 | if (DidAllocate()) { |
211 | 0 | MallocAdapter<A>::Deallocate(GetAllocator(), GetData(), GetCapacity()); |
212 | 0 | } |
213 | 0 | } |
214 | | |
215 | | AllocationTransaction(const AllocationTransaction&) = delete; |
216 | | void operator=(const AllocationTransaction&) = delete; |
217 | | |
218 | 0 | A& GetAllocator() { return allocator_data_.template get<0>(); } |
219 | 0 | Pointer<A>& GetData() { return allocator_data_.template get<1>(); } |
220 | 0 | SizeType<A>& GetCapacity() { return capacity_; } |
221 | | |
222 | 0 | bool DidAllocate() { return GetData() != nullptr; } |
223 | | |
224 | 0 | Pointer<A> Allocate(SizeType<A> requested_capacity) { |
225 | 0 | Allocation<A> result = |
226 | 0 | MallocAdapter<A>::Allocate(GetAllocator(), requested_capacity); |
227 | 0 | GetData() = result.data; |
228 | 0 | GetCapacity() = result.capacity; |
229 | 0 | return result.data; |
230 | 0 | } |
231 | | |
232 | 0 | [[nodiscard]] Allocation<A> Release() && { |
233 | 0 | Allocation<A> result = {GetData(), GetCapacity()}; |
234 | 0 | Reset(); |
235 | 0 | return result; |
236 | 0 | } |
237 | | |
238 | | private: |
239 | 0 | void Reset() { |
240 | 0 | GetData() = nullptr; |
241 | 0 | GetCapacity() = 0; |
242 | 0 | } |
243 | | |
244 | | container_internal::CompressedTuple<A, Pointer<A>> allocator_data_; |
245 | | SizeType<A> capacity_; |
246 | | }; |
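AllocationTransaction is a commit-or-rollback guard: memory obtained through Allocate is returned to the allocator in the destructor unless the transaction is released, which is how the mutators below can allocate up front and adopt the new buffer only after element construction succeeds. A simplified stand-in showing the idiom (AllocGuard is illustrative, not the Abseil class):

#include <cstddef>
#include <memory>
#include <utility>

// Sketch of the commit-or-rollback idiom behind AllocationTransaction: the
// guard frees the buffer in its destructor unless Release() is called first.
template <typename T, typename A = std::allocator<T>>
class AllocGuard {
 public:
  explicit AllocGuard(A& a) : alloc_(a) {}
  ~AllocGuard() {
    if (data_ != nullptr) {
      std::allocator_traits<A>::deallocate(alloc_, data_, capacity_);
    }
  }
  T* Allocate(std::size_t n) {
    data_ = std::allocator_traits<A>::allocate(alloc_, n);
    capacity_ = n;
    return data_;
  }
  std::pair<T*, std::size_t> Release() && {  // commit: caller now owns the buffer
    std::pair<T*, std::size_t> result(data_, capacity_);
    data_ = nullptr;
    capacity_ = 0;
    return result;
  }

 private:
  A& alloc_;
  T* data_ = nullptr;
  std::size_t capacity_ = 0;
};

int main() {
  std::allocator<int> alloc;
  AllocGuard<int> guard(alloc);
  guard.Allocate(8);
  // ... construct elements here; if construction throws, ~AllocGuard frees the buffer ...
  auto owned = std::move(guard).Release();  // commit: the guard no longer frees
  std::allocator_traits<std::allocator<int>>::deallocate(alloc, owned.first, owned.second);
  return 0;
}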
247 | | |
248 | | template <typename A> |
249 | | class ConstructionTransaction { |
250 | | public: |
251 | | explicit ConstructionTransaction(A& allocator) |
252 | | : allocator_data_(allocator, nullptr), size_(0) {} |
253 | | |
254 | | ~ConstructionTransaction() { |
255 | | if (DidConstruct()) { |
256 | | DestroyAdapter<A>::DestroyElements(GetAllocator(), GetData(), GetSize()); |
257 | | } |
258 | | } |
259 | | |
260 | | ConstructionTransaction(const ConstructionTransaction&) = delete; |
261 | | void operator=(const ConstructionTransaction&) = delete; |
262 | | |
263 | | A& GetAllocator() { return allocator_data_.template get<0>(); } |
264 | | Pointer<A>& GetData() { return allocator_data_.template get<1>(); } |
265 | | SizeType<A>& GetSize() { return size_; } |
266 | | |
267 | | bool DidConstruct() { return GetData() != nullptr; } |
268 | | template <typename ValueAdapter> |
269 | | void Construct(Pointer<A> data, ValueAdapter& values, SizeType<A> size) { |
270 | | ConstructElements<A>(GetAllocator(), data, values, size); |
271 | | GetData() = data; |
272 | | GetSize() = size; |
273 | | } |
274 | | void Commit() && { |
275 | | GetData() = nullptr; |
276 | | GetSize() = 0; |
277 | | } |
278 | | |
279 | | private: |
280 | | container_internal::CompressedTuple<A, Pointer<A>> allocator_data_; |
281 | | SizeType<A> size_; |
282 | | }; |
283 | | |
284 | | template <typename T, size_t N, typename A> |
285 | | class Storage { |
286 | | public: |
287 | | struct MemcpyPolicy {}; |
288 | | struct ElementwiseAssignPolicy {}; |
289 | | struct ElementwiseSwapPolicy {}; |
290 | | struct ElementwiseConstructPolicy {}; |
291 | | |
292 | | using MoveAssignmentPolicy = absl::conditional_t< |
293 | | // Fast path: if the value type can be trivially move assigned and |
294 | | // destroyed, and we know the allocator doesn't do anything fancy, then |
295 | | // it's safe for us to simply adopt the contents of the storage for |
296 | | // `other` and remove its own reference to them. It's as if we had |
297 | | // individually move-assigned each value and then destroyed the original. |
298 | | absl::conjunction<absl::is_trivially_move_assignable<ValueType<A>>, |
299 | | absl::is_trivially_destructible<ValueType<A>>, |
300 | | std::is_same<A, std::allocator<ValueType<A>>>>::value, |
301 | | MemcpyPolicy, |
302 | | // Otherwise we use move assignment if possible. If not, we simulate |
303 | | // move assignment using move construction. |
304 | | // |
305 | | // Note that this is in contrast to e.g. std::vector and std::optional, |
306 | | // which are themselves not move-assignable when their contained type is |
307 | | // not. |
308 | | absl::conditional_t<IsMoveAssignOk<A>::value, ElementwiseAssignPolicy, |
309 | | ElementwiseConstructPolicy>>; |
310 | | |
311 | | // The policy to be used specifically when swapping inlined elements. |
312 | | using SwapInlinedElementsPolicy = absl::conditional_t< |
313 | | // Fast path: if the value type can be trivially relocated, and we |
314 | | // know the allocator doesn't do anything fancy, then it's safe for us |
315 | | // to simply swap the bytes in the inline storage. It's as if we had |
316 | | // relocated the first vector's elements into temporary storage, |
317 | | // relocated the second's elements into the (now-empty) first's, |
318 | | // and then relocated from temporary storage into the second. |
319 | | absl::conjunction<absl::is_trivially_relocatable<ValueType<A>>, |
320 | | std::is_same<A, std::allocator<ValueType<A>>>>::value, |
321 | | MemcpyPolicy, |
322 | | absl::conditional_t<IsSwapOk<A>::value, ElementwiseSwapPolicy, |
323 | | ElementwiseConstructPolicy>>; |
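Both policy aliases collapse to one of the empty tag types above based purely on type traits. A hedged sketch of how the move-assignment selection resolves for two common element types, written with std:: traits rather than the absl:: wrappers (kMoveAssignByMemcpy is an illustrative name):

#include <memory>
#include <string>
#include <type_traits>

// Sketch of the MoveAssignmentPolicy fast-path condition: memcpy is used only
// for a trivially move-assignable, trivially destructible element held with
// the default std::allocator.
template <typename T, typename A = std::allocator<T>>
constexpr bool kMoveAssignByMemcpy =
    std::is_trivially_move_assignable<T>::value &&
    std::is_trivially_destructible<T>::value &&
    std::is_same<A, std::allocator<T>>::value;

static_assert(kMoveAssignByMemcpy<int>, "ints can be moved as raw bytes");
static_assert(!kMoveAssignByMemcpy<std::string>,
              "std::string falls back to elementwise move assignment");

int main() { return 0; }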
324 | | |
325 | 0 | static SizeType<A> NextCapacity(SizeType<A> current_capacity) { |
326 | 0 | return current_capacity * 2; |
327 | 0 | }
328 | | |
329 | | static SizeType<A> ComputeCapacity(SizeType<A> current_capacity, |
330 | 0 | SizeType<A> requested_capacity) { |
331 | 0 | return (std::max)(NextCapacity(current_capacity), requested_capacity); |
332 | 0 | } |
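So a growing vector doubles its capacity, but a request larger than double is honored exactly. A small worked sketch of the arithmetic with plain size_t stand-ins (these are free-standing copies, not the members above):

#include <algorithm>
#include <cstddef>

// Sketch of the growth rule: grow geometrically, but never return less than
// what the caller asked for.
constexpr std::size_t NextCapacity(std::size_t current) { return current * 2; }
constexpr std::size_t ComputeCapacity(std::size_t current, std::size_t requested) {
  return (std::max)(NextCapacity(current), requested);
}

static_assert(ComputeCapacity(4, 5) == 8, "doubling wins when the request is small");
static_assert(ComputeCapacity(4, 100) == 100, "large requests are honored exactly");

int main() { return 0; }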
333 | | |
334 | | // --------------------------------------------------------------------------- |
335 | | // Storage Constructors and Destructor |
336 | | // --------------------------------------------------------------------------- |
337 | | |
338 | 0 | Storage() : metadata_(A(), /* size and is_allocated */ 0u) {} |
339 | | |
340 | | explicit Storage(const A& allocator) |
341 | 0 | : metadata_(allocator, /* size and is_allocated */ 0u) {} |
342 | | |
343 | 0 | ~Storage() { |
344 | | // Fast path: if we are empty and not allocated, there's nothing to do. |
345 | 0 | if (GetSizeAndIsAllocated() == 0) { |
346 | 0 | return; |
347 | 0 | } |
348 | | |
349 | | // Fast path: if no destructors need to be run and we know the allocator |
350 | | // doesn't do anything fancy, then all we need to do is deallocate (and |
351 | | // maybe not even that). |
352 | 0 | if (absl::is_trivially_destructible<ValueType<A>>::value && |
353 | 0 | std::is_same<A, std::allocator<ValueType<A>>>::value) { |
354 | 0 | DeallocateIfAllocated(); |
355 | 0 | return; |
356 | 0 | } |
357 | | |
358 | 0 | DestroyContents(); |
359 | 0 | }
360 | | |
361 | | // --------------------------------------------------------------------------- |
362 | | // Storage Member Accessors |
363 | | // --------------------------------------------------------------------------- |
364 | | |
365 | 0 | SizeType<A>& GetSizeAndIsAllocated() { return metadata_.template get<1>(); }
366 | | |
367 | 0 | const SizeType<A>& GetSizeAndIsAllocated() const { |
368 | 0 | return metadata_.template get<1>(); |
369 | 0 | }
370 | | |
371 | 0 | SizeType<A> GetSize() const { return GetSizeAndIsAllocated() >> 1; }
372 | | |
373 | 0 | bool GetIsAllocated() const { return GetSizeAndIsAllocated() & 1; }
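The two accessors above show the encoding used throughout Storage: a single word whose low bit is the "is heap allocated" flag and whose remaining bits hold the size. A worked sketch of that packing (Pack, Size, and IsAllocated are illustrative helpers, not Abseil functions):

#include <cstddef>

// Sketch of the size/is_allocated packing: the low bit is the flag, the upper
// bits are the logical size, matching GetSize (>> 1) and GetIsAllocated (& 1).
constexpr std::size_t Pack(std::size_t size, bool allocated) {
  return (size << 1) | static_cast<std::size_t>(allocated);
}
constexpr std::size_t Size(std::size_t packed) { return packed >> 1; }
constexpr bool IsAllocated(std::size_t packed) { return (packed & 1) != 0; }

static_assert(Size(Pack(5, true)) == 5, "the size survives the round trip");
static_assert(IsAllocated(Pack(5, true)), "the flag survives the round trip");
static_assert(Pack(0, false) == 0, "an empty inlined vector stores all zeros");

int main() { return 0; }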
374 | | |
375 | 0 | Pointer<A> GetAllocatedData() { |
376 | | // GCC 12 has a false-positive -Wmaybe-uninitialized warning here. |
377 | | #if ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(12, 0) |
378 | | #pragma GCC diagnostic push |
379 | | #pragma GCC diagnostic ignored "-Wmaybe-uninitialized" |
380 | | #endif |
381 | 0 | return data_.allocated.allocated_data; |
382 | | #if ABSL_INTERNAL_HAVE_MIN_GNUC_VERSION(12, 0) |
383 | | #pragma GCC diagnostic pop |
384 | | #endif |
385 | 0 | }
386 | | |
387 | 0 | ConstPointer<A> GetAllocatedData() const { |
388 | 0 | return data_.allocated.allocated_data; |
389 | 0 | } |
390 | | |
391 | | // ABSL_ATTRIBUTE_NO_SANITIZE_CFI is used because the memory pointed to may be |
392 | | // uninitialized, a common pattern in allocate()+construct() APIs. |
393 | | // https://clang.llvm.org/docs/ControlFlowIntegrity.html#bad-cast-checking |
394 | | // NOTE: When this was written, LLVM documentation did not explicitly |
395 | | // mention that casting `char*` and using `reinterpret_cast` qualifies |
396 | | // as a bad cast. |
397 | 0 | ABSL_ATTRIBUTE_NO_SANITIZE_CFI Pointer<A> GetInlinedData() { |
398 | 0 | return reinterpret_cast<Pointer<A>>(data_.inlined.inlined_data); |
399 | 0 | }
400 | | |
401 | 0 | ABSL_ATTRIBUTE_NO_SANITIZE_CFI ConstPointer<A> GetInlinedData() const { |
402 | 0 | return reinterpret_cast<ConstPointer<A>>(data_.inlined.inlined_data); |
403 | 0 | } |
404 | | |
405 | 0 | SizeType<A> GetAllocatedCapacity() const { |
406 | 0 | return data_.allocated.allocated_capacity; |
407 | 0 | }
408 | | |
409 | 0 | SizeType<A> GetInlinedCapacity() const { |
410 | 0 | return static_cast<SizeType<A>>(kOptimalInlinedSize); |
411 | 0 | }
412 | | |
413 | 0 | StorageView<A> MakeStorageView() { |
414 | 0 | return GetIsAllocated() ? StorageView<A>{GetAllocatedData(), GetSize(), |
415 | 0 | GetAllocatedCapacity()} |
416 | 0 | : StorageView<A>{GetInlinedData(), GetSize(), |
417 | 0 | GetInlinedCapacity()}; |
418 | 0 | } |
419 | | |
420 | 0 | A& GetAllocator() { return metadata_.template get<0>(); }
421 | | |
422 | | const A& GetAllocator() const { return metadata_.template get<0>(); } |
423 | | |
424 | | // --------------------------------------------------------------------------- |
425 | | // Storage Member Mutators |
426 | | // --------------------------------------------------------------------------- |
427 | | |
428 | | ABSL_ATTRIBUTE_NOINLINE void InitFrom(const Storage& other); |
429 | | |
430 | | template <typename ValueAdapter> |
431 | | void Initialize(ValueAdapter values, SizeType<A> new_size); |
432 | | |
433 | | template <typename ValueAdapter> |
434 | | void Assign(ValueAdapter values, SizeType<A> new_size); |
435 | | |
436 | | template <typename ValueAdapter> |
437 | | void Resize(ValueAdapter values, SizeType<A> new_size); |
438 | | |
439 | | template <typename ValueAdapter> |
440 | | Iterator<A> Insert(ConstIterator<A> pos, ValueAdapter values, |
441 | | SizeType<A> insert_count); |
442 | | |
443 | | template <typename... Args> |
444 | | Reference<A> EmplaceBack(Args&&... args); |
445 | | |
446 | | Iterator<A> Erase(ConstIterator<A> from, ConstIterator<A> to); |
447 | | |
448 | | void Reserve(SizeType<A> requested_capacity); |
449 | | |
450 | | void ShrinkToFit(); |
451 | | |
452 | | void Swap(Storage* other_storage_ptr); |
453 | | |
454 | 0 | void SetIsAllocated() { |
455 | 0 | GetSizeAndIsAllocated() |= static_cast<SizeType<A>>(1); |
456 | 0 | }
457 | | |
458 | | void UnsetIsAllocated() { |
459 | | GetSizeAndIsAllocated() &= ((std::numeric_limits<SizeType<A>>::max)() - 1); |
460 | | } |
461 | | |
462 | | void SetSize(SizeType<A> size) { |
463 | | GetSizeAndIsAllocated() = |
464 | | (size << 1) | static_cast<SizeType<A>>(GetIsAllocated()); |
465 | | } |
466 | | |
467 | | void SetAllocatedSize(SizeType<A> size) { |
468 | | GetSizeAndIsAllocated() = (size << 1) | static_cast<SizeType<A>>(1); |
469 | | } |
470 | | |
471 | 0 | void SetInlinedSize(SizeType<A> size) { |
472 | 0 | GetSizeAndIsAllocated() = size << static_cast<SizeType<A>>(1); |
473 | 0 | } |
474 | | |
475 | 0 | void AddSize(SizeType<A> count) { |
476 | 0 | GetSizeAndIsAllocated() += count << static_cast<SizeType<A>>(1); |
477 | 0 | }
478 | | |
479 | | void SubtractSize(SizeType<A> count) { |
480 | | ABSL_HARDENING_ASSERT(count <= GetSize()); |
481 | | |
482 | | GetSizeAndIsAllocated() -= count << static_cast<SizeType<A>>(1); |
483 | | } |
484 | | |
485 | 0 | void SetAllocation(Allocation<A> allocation) { |
486 | 0 | data_.allocated.allocated_data = allocation.data; |
487 | 0 | data_.allocated.allocated_capacity = allocation.capacity; |
488 | 0 | }
489 | | |
490 | | void MemcpyFrom(const Storage& other_storage) { |
491 | | // Assumption check: it doesn't make sense to memcpy inlined elements unless |
492 | | // we know the allocator doesn't do anything fancy, and one of the following |
493 | | // holds: |
494 | | // |
495 | | // * The elements are trivially relocatable. |
496 | | // |
497 | | // * It's possible to trivially assign the elements and then destroy the |
498 | | // source. |
499 | | // |
500 | | // * It's possible to trivially copy construct/assign the elements. |
501 | | // |
502 | | { |
503 | | using V = ValueType<A>; |
504 | | ABSL_HARDENING_ASSERT( |
505 | | other_storage.GetIsAllocated() || |
506 | | (std::is_same<A, std::allocator<V>>::value && |
507 | | ( |
508 | | // First case above |
509 | | absl::is_trivially_relocatable<V>::value || |
510 | | // Second case above |
511 | | (absl::is_trivially_move_assignable<V>::value && |
512 | | absl::is_trivially_destructible<V>::value) || |
513 | | // Third case above |
514 | | (absl::is_trivially_copy_constructible<V>::value || |
515 | | absl::is_trivially_copy_assignable<V>::value)))); |
516 | | } |
517 | | |
518 | | GetSizeAndIsAllocated() = other_storage.GetSizeAndIsAllocated(); |
519 | | data_ = other_storage.data_; |
520 | | } |
521 | | |
522 | 0 | void DeallocateIfAllocated() { |
523 | 0 | if (GetIsAllocated()) { |
524 | 0 | MallocAdapter<A>::Deallocate(GetAllocator(), GetAllocatedData(), |
525 | 0 | GetAllocatedCapacity()); |
526 | 0 | } |
527 | 0 | }
528 | | |
529 | | private: |
530 | | ABSL_ATTRIBUTE_NOINLINE void DestroyContents(); |
531 | | |
532 | | using Metadata = container_internal::CompressedTuple<A, SizeType<A>>; |
533 | | |
534 | | struct Allocated { |
535 | | Pointer<A> allocated_data; |
536 | | SizeType<A> allocated_capacity; |
537 | | }; |
538 | | |
539 | | // `kOptimalInlinedSize` is an automatically adjusted inlined capacity of the |
540 | | // `InlinedVector`. Sometimes, it is possible to increase the capacity (from |
541 | | // the user requested `N`) without increasing the size of the `InlinedVector`. |
542 | | static constexpr size_t kOptimalInlinedSize = |
543 | | (std::max)(N, sizeof(Allocated) / sizeof(ValueType<A>)); |
544 | | |
545 | | struct Inlined { |
546 | | alignas(ValueType<A>) unsigned char inlined_data[sizeof( |
547 | | ValueType<A>[kOptimalInlinedSize])]; |
548 | | }; |
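kOptimalInlinedSize rounds the requested inline capacity N up to however many elements fit in the heap bookkeeping (pointer plus capacity), since the union below is at least that large anyway. A worked sketch of the arithmetic, with AllocatedLayout standing in for the Allocated struct:

#include <algorithm>
#include <cstddef>

// Sketch of the kOptimalInlinedSize rule: the inline buffer shares a union
// with the {pointer, capacity} pair, so tiny requests are rounded up to fill
// that space at no extra size cost.
struct AllocatedLayout {
  void* data;
  std::size_t capacity;
};

constexpr std::size_t OptimalInlinedSize(std::size_t n, std::size_t value_size) {
  return (std::max)(n, sizeof(AllocatedLayout) / value_size);
}

// With 1-byte elements on a typical 64-bit target this yields 16 inline slots
// even though only 1 was requested; a large request is passed through as-is.
static_assert(OptimalInlinedSize(1, sizeof(char)) >= sizeof(AllocatedLayout),
              "tiny requests are rounded up to fill the union");
static_assert(OptimalInlinedSize(64, sizeof(char)) == 64,
              "a request that already covers the union is unchanged");

int main() { return 0; }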
549 | | |
550 | | union Data { |
551 | | Allocated allocated; |
552 | | Inlined inlined; |
553 | | }; |
554 | | |
555 | | void SwapN(ElementwiseSwapPolicy, Storage* other, SizeType<A> n); |
556 | | void SwapN(ElementwiseConstructPolicy, Storage* other, SizeType<A> n); |
557 | | |
558 | | void SwapInlinedElements(MemcpyPolicy, Storage* other); |
559 | | template <typename NotMemcpyPolicy> |
560 | | void SwapInlinedElements(NotMemcpyPolicy, Storage* other); |
561 | | |
562 | | template <typename... Args> |
563 | | ABSL_ATTRIBUTE_NOINLINE Reference<A> EmplaceBackSlow(Args&&... args); |
564 | | |
565 | | Metadata metadata_; |
566 | | Data data_; |
567 | | }; |
568 | | |
569 | | template <typename T, size_t N, typename A> |
570 | 0 | void Storage<T, N, A>::DestroyContents() { |
571 | 0 | Pointer<A> data = GetIsAllocated() ? GetAllocatedData() : GetInlinedData(); |
572 | 0 | DestroyAdapter<A>::DestroyElements(GetAllocator(), data, GetSize()); |
573 | 0 | DeallocateIfAllocated(); |
574 | 0 | }
575 | | |
576 | | template <typename T, size_t N, typename A> |
577 | | void Storage<T, N, A>::InitFrom(const Storage& other) { |
578 | | const SizeType<A> n = other.GetSize(); |
579 | | ABSL_HARDENING_ASSERT(n > 0);  // Empty sources handled in caller. |
580 | | ConstPointer<A> src; |
581 | | Pointer<A> dst; |
582 | | if (!other.GetIsAllocated()) { |
583 | | dst = GetInlinedData(); |
584 | | src = other.GetInlinedData(); |
585 | | } else { |
586 | | // Because this is only called from the `InlinedVector` constructors, it's |
587 | | // safe to take on the allocation with size `0`. If `ConstructElements(...)` |
588 | | // throws, deallocation will be automatically handled by `~Storage()`. |
589 | | SizeType<A> requested_capacity = ComputeCapacity(GetInlinedCapacity(), n); |
590 | | Allocation<A> allocation = |
591 | | MallocAdapter<A>::Allocate(GetAllocator(), requested_capacity); |
592 | | SetAllocation(allocation); |
593 | | dst = allocation.data; |
594 | | src = other.GetAllocatedData(); |
595 | | } |
596 | | |
597 | | // Fast path: if the value type is trivially copy constructible and we know |
598 | | // the allocator doesn't do anything fancy, then we know it is legal for us to |
599 | | // simply memcpy the other vector's elements. |
600 | | if (absl::is_trivially_copy_constructible<ValueType<A>>::value && |
601 | | std::is_same<A, std::allocator<ValueType<A>>>::value) { |
602 | | std::memcpy(reinterpret_cast<char*>(dst), |
603 | | reinterpret_cast<const char*>(src), n * sizeof(ValueType<A>)); |
604 | | } else { |
605 | | auto values = IteratorValueAdapter<A, ConstPointer<A>>(src); |
606 | | ConstructElements<A>(GetAllocator(), dst, values, n); |
607 | | } |
608 | | |
609 | | GetSizeAndIsAllocated() = other.GetSizeAndIsAllocated(); |
610 | | } |
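The fast path above relies on the usual rule for trivially copy constructible types: copying their bytes into raw storage is equivalent to copy-constructing each element. A standalone illustration with a hypothetical Point type:

#include <cstring>
#include <type_traits>

// Sketch of the memcpy fast path in InitFrom: for a trivially copy
// constructible element, copying raw bytes into uninitialized storage stands
// in for element-by-element copy construction.
struct Point { int x; int y; };
static_assert(std::is_trivially_copy_constructible<Point>::value, "");

int main() {
  Point src[3] = {{1, 2}, {3, 4}, {5, 6}};
  alignas(Point) unsigned char raw[sizeof(Point) * 3];  // uninitialized destination

  std::memcpy(raw, src, sizeof(src));  // "constructs" all three elements at once

  Point* dst = reinterpret_cast<Point*>(raw);
  return dst[2].y == 6 ? 0 : 1;
}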
611 | | |
612 | | template <typename T, size_t N, typename A> |
613 | | template <typename ValueAdapter> |
614 | | auto Storage<T, N, A>::Initialize(ValueAdapter values, |
615 | 0 | SizeType<A> new_size) -> void { |
616 | | // Only callable from constructors! |
617 | 0 | ABSL_HARDENING_ASSERT(!GetIsAllocated()); |
618 | 0 | ABSL_HARDENING_ASSERT(GetSize() == 0); |
619 | |
620 | 0 | Pointer<A> construct_data; |
621 | 0 | if (new_size > GetInlinedCapacity()) { |
622 | | // Because this is only called from the `InlinedVector` constructors, it's |
623 | | // safe to take on the allocation with size `0`. If `ConstructElements(...)` |
624 | | // throws, deallocation will be automatically handled by `~Storage()`. |
625 | 0 | SizeType<A> requested_capacity = |
626 | 0 | ComputeCapacity(GetInlinedCapacity(), new_size); |
627 | 0 | Allocation<A> allocation = |
628 | 0 | MallocAdapter<A>::Allocate(GetAllocator(), requested_capacity); |
629 | 0 | construct_data = allocation.data; |
630 | 0 | SetAllocation(allocation); |
631 | 0 | SetIsAllocated(); |
632 | 0 | } else { |
633 | 0 | construct_data = GetInlinedData(); |
634 | 0 | } |
635 | |
636 | 0 | ConstructElements<A>(GetAllocator(), construct_data, values, new_size); |
637 | | |
638 | | // Since the initial size was guaranteed to be `0` and the allocated bit is |
639 | | // already correct for either case, *adding* `new_size` gives us the correct |
640 | | // result faster than setting it directly. |
641 | 0 | AddSize(new_size); |
642 | 0 | } |
643 | | |
644 | | template <typename T, size_t N, typename A> |
645 | | template <typename ValueAdapter> |
646 | | auto Storage<T, N, A>::Assign(ValueAdapter values, |
647 | | SizeType<A> new_size) -> void { |
648 | | StorageView<A> storage_view = MakeStorageView(); |
649 | | |
650 | | AllocationTransaction<A> allocation_tx(GetAllocator()); |
651 | | |
652 | | absl::Span<ValueType<A>> assign_loop; |
653 | | absl::Span<ValueType<A>> construct_loop; |
654 | | absl::Span<ValueType<A>> destroy_loop; |
655 | | |
656 | | if (new_size > storage_view.capacity) { |
657 | | SizeType<A> requested_capacity = |
658 | | ComputeCapacity(storage_view.capacity, new_size); |
659 | | construct_loop = {allocation_tx.Allocate(requested_capacity), new_size}; |
660 | | destroy_loop = {storage_view.data, storage_view.size}; |
661 | | } else if (new_size > storage_view.size) { |
662 | | assign_loop = {storage_view.data, storage_view.size}; |
663 | | construct_loop = {storage_view.data + storage_view.size, |
664 | | new_size - storage_view.size}; |
665 | | } else { |
666 | | assign_loop = {storage_view.data, new_size}; |
667 | | destroy_loop = {storage_view.data + new_size, storage_view.size - new_size}; |
668 | | } |
669 | | |
670 | | AssignElements<A>(assign_loop.data(), values, assign_loop.size()); |
671 | | |
672 | | ConstructElements<A>(GetAllocator(), construct_loop.data(), values, |
673 | | construct_loop.size()); |
674 | | |
675 | | DestroyAdapter<A>::DestroyElements(GetAllocator(), destroy_loop.data(), |
676 | | destroy_loop.size()); |
677 | | |
678 | | if (allocation_tx.DidAllocate()) { |
679 | | DeallocateIfAllocated(); |
680 | | SetAllocation(std::move(allocation_tx).Release()); |
681 | | SetIsAllocated(); |
682 | | } |
683 | | |
684 | | SetSize(new_size); |
685 | | } |
686 | | |
687 | | template <typename T, size_t N, typename A> |
688 | | template <typename ValueAdapter> |
689 | | auto Storage<T, N, A>::Resize(ValueAdapter values, |
690 | | SizeType<A> new_size) -> void { |
691 | | StorageView<A> storage_view = MakeStorageView(); |
692 | | Pointer<A> const base = storage_view.data; |
693 | | const SizeType<A> size = storage_view.size; |
694 | | A& alloc = GetAllocator(); |
695 | | if (new_size <= size) { |
696 | | // Destroy extra old elements. |
697 | | DestroyAdapter<A>::DestroyElements(alloc, base + new_size, size - new_size); |
698 | | } else if (new_size <= storage_view.capacity) { |
699 | | // Construct new elements in place. |
700 | | ConstructElements<A>(alloc, base + size, values, new_size - size); |
701 | | } else { |
702 | | // Steps: |
703 | | // a. Allocate new backing store. |
704 | | // b. Construct new elements in new backing store. |
705 | | // c. Move existing elements from old backing store to new backing store. |
706 | | // d. Destroy all elements in old backing store. |
707 | | // Use transactional wrappers for the first two steps so we can roll |
708 | | // back if necessary due to exceptions. |
709 | | AllocationTransaction<A> allocation_tx(alloc); |
710 | | SizeType<A> requested_capacity = |
711 | | ComputeCapacity(storage_view.capacity, new_size); |
712 | | Pointer<A> new_data = allocation_tx.Allocate(requested_capacity); |
713 | | |
714 | | ConstructionTransaction<A> construction_tx(alloc); |
715 | | construction_tx.Construct(new_data + size, values, new_size - size); |
716 | | |
717 | | IteratorValueAdapter<A, MoveIterator<A>> move_values( |
718 | | (MoveIterator<A>(base))); |
719 | | ConstructElements<A>(alloc, new_data, move_values, size); |
720 | | |
721 | | DestroyAdapter<A>::DestroyElements(alloc, base, size); |
722 | | std::move(construction_tx).Commit(); |
723 | | DeallocateIfAllocated(); |
724 | | SetAllocation(std::move(allocation_tx).Release()); |
725 | | SetIsAllocated(); |
726 | | } |
727 | | SetSize(new_size); |
728 | | } |
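At the public API level, this is the path taken when resize grows beyond the current capacity. A usage-level sketch against the absl::InlinedVector wrapper, which forwards its resize call down to this Storage::Resize; it assumes the Abseil headers are available:

#include <cassert>
#include <string>

#include "absl/container/inlined_vector.h"

// Usage sketch: growing past the inline capacity of 2 triggers the
// allocate / construct-new-tail / move-old-elements / destroy-old sequence
// described in the comment above.
int main() {
  absl::InlinedVector<std::string, 2> v = {"a", "b"};

  v.resize(5);  // reallocates, value-initializes 3 new strings, moves "a", "b"
  assert(v.size() == 5);
  assert(v[0] == "a");
  assert(v[2].empty());
  return 0;
}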
729 | | |
730 | | template <typename T, size_t N, typename A> |
731 | | template <typename ValueAdapter> |
732 | | auto Storage<T, N, A>::Insert(ConstIterator<A> pos, ValueAdapter values, |
733 | | SizeType<A> insert_count) -> Iterator<A> { |
734 | | StorageView<A> storage_view = MakeStorageView(); |
735 | | |
736 | | auto insert_index = static_cast<SizeType<A>>( |
737 | | std::distance(ConstIterator<A>(storage_view.data), pos)); |
738 | | SizeType<A> insert_end_index = insert_index + insert_count; |
739 | | SizeType<A> new_size = storage_view.size + insert_count; |
740 | | |
741 | | if (new_size > storage_view.capacity) { |
742 | | AllocationTransaction<A> allocation_tx(GetAllocator()); |
743 | | ConstructionTransaction<A> construction_tx(GetAllocator()); |
744 | | ConstructionTransaction<A> move_construction_tx(GetAllocator()); |
745 | | |
746 | | IteratorValueAdapter<A, MoveIterator<A>> move_values( |
747 | | MoveIterator<A>(storage_view.data)); |
748 | | |
749 | | SizeType<A> requested_capacity = |
750 | | ComputeCapacity(storage_view.capacity, new_size); |
751 | | Pointer<A> new_data = allocation_tx.Allocate(requested_capacity); |
752 | | |
753 | | construction_tx.Construct(new_data + insert_index, values, insert_count); |
754 | | |
755 | | move_construction_tx.Construct(new_data, move_values, insert_index); |
756 | | |
757 | | ConstructElements<A>(GetAllocator(), new_data + insert_end_index, |
758 | | move_values, storage_view.size - insert_index); |
759 | | |
760 | | DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data, |
761 | | storage_view.size); |
762 | | |
763 | | std::move(construction_tx).Commit(); |
764 | | std::move(move_construction_tx).Commit(); |
765 | | DeallocateIfAllocated(); |
766 | | SetAllocation(std::move(allocation_tx).Release()); |
767 | | |
768 | | SetAllocatedSize(new_size); |
769 | | return Iterator<A>(new_data + insert_index); |
770 | | } else { |
771 | | SizeType<A> move_construction_destination_index = |
772 | | (std::max)(insert_end_index, storage_view.size); |
773 | | |
774 | | ConstructionTransaction<A> move_construction_tx(GetAllocator()); |
775 | | |
776 | | IteratorValueAdapter<A, MoveIterator<A>> move_construction_values( |
777 | | MoveIterator<A>(storage_view.data + |
778 | | (move_construction_destination_index - insert_count))); |
779 | | absl::Span<ValueType<A>> move_construction = { |
780 | | storage_view.data + move_construction_destination_index, |
781 | | new_size - move_construction_destination_index}; |
782 | | |
783 | | Pointer<A> move_assignment_values = storage_view.data + insert_index; |
784 | | absl::Span<ValueType<A>> move_assignment = { |
785 | | storage_view.data + insert_end_index, |
786 | | move_construction_destination_index - insert_end_index}; |
787 | | |
788 | | absl::Span<ValueType<A>> insert_assignment = {move_assignment_values, |
789 | | move_construction.size()}; |
790 | | |
791 | | absl::Span<ValueType<A>> insert_construction = { |
792 | | insert_assignment.data() + insert_assignment.size(), |
793 | | insert_count - insert_assignment.size()}; |
794 | | |
795 | | move_construction_tx.Construct(move_construction.data(), |
796 | | move_construction_values, |
797 | | move_construction.size()); |
798 | | |
799 | | for (Pointer<A> |
800 | | destination = move_assignment.data() + move_assignment.size(), |
801 | | last_destination = move_assignment.data(), |
802 | | source = move_assignment_values + move_assignment.size(); |
803 | | ;) { |
804 | | --destination; |
805 | | --source; |
806 | | if (destination < last_destination) break; |
807 | | *destination = std::move(*source); |
808 | | } |
809 | | |
810 | | AssignElements<A>(insert_assignment.data(), values, |
811 | | insert_assignment.size()); |
812 | | |
813 | | ConstructElements<A>(GetAllocator(), insert_construction.data(), values, |
814 | | insert_construction.size()); |
815 | | |
816 | | std::move(move_construction_tx).Commit(); |
817 | | |
818 | | AddSize(insert_count); |
819 | | return Iterator<A>(storage_view.data + insert_index); |
820 | | } |
821 | | } |
822 | | |
823 | | template <typename T, size_t N, typename A> |
824 | | template <typename... Args> |
825 | 0 | auto Storage<T, N, A>::EmplaceBack(Args&&... args) -> Reference<A> { |
826 | 0 | StorageView<A> storage_view = MakeStorageView(); |
827 | 0 | const SizeType<A> n = storage_view.size; |
828 | 0 | if (ABSL_PREDICT_TRUE(n != storage_view.capacity)) { |
829 | | // Fast path; new element fits. |
830 | 0 | Pointer<A> last_ptr = storage_view.data + n; |
831 | 0 | AllocatorTraits<A>::construct(GetAllocator(), last_ptr, |
832 | 0 | std::forward<Args>(args)...); |
833 | 0 | AddSize(1); |
834 | 0 | return *last_ptr; |
835 | 0 | } |
836 | | // TODO(b/173712035): Annotate with musttail attribute to prevent regression. |
837 | 0 | return EmplaceBackSlow(std::forward<Args>(args)...); |
838 | 0 | } |
839 | | |
840 | | template <typename T, size_t N, typename A> |
841 | | template <typename... Args> |
842 | 0 | auto Storage<T, N, A>::EmplaceBackSlow(Args&&... args) -> Reference<A> { |
843 | 0 | StorageView<A> storage_view = MakeStorageView(); |
844 | 0 | AllocationTransaction<A> allocation_tx(GetAllocator()); |
845 | 0 | IteratorValueAdapter<A, MoveIterator<A>> move_values( |
846 | 0 | MoveIterator<A>(storage_view.data)); |
847 | 0 | SizeType<A> requested_capacity = NextCapacity(storage_view.capacity); |
848 | 0 | Pointer<A> construct_data = allocation_tx.Allocate(requested_capacity); |
849 | 0 | Pointer<A> last_ptr = construct_data + storage_view.size; |
850 | | |
851 | | // Construct new element. |
852 | 0 | AllocatorTraits<A>::construct(GetAllocator(), last_ptr, |
853 | 0 | std::forward<Args>(args)...); |
854 | | // Move elements from old backing store to new backing store. |
855 | 0 | ABSL_INTERNAL_TRY { |
856 | 0 | ConstructElements<A>(GetAllocator(), allocation_tx.GetData(), move_values, |
857 | 0 | storage_view.size); |
858 | 0 | } |
859 | 0 | ABSL_INTERNAL_CATCH_ANY { |
860 | 0 | AllocatorTraits<A>::destroy(GetAllocator(), last_ptr); |
861 | 0 | ABSL_INTERNAL_RETHROW; |
862 | 0 | } |
863 | | // Destroy elements in old backing store. |
864 | 0 | DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data, |
865 | 0 | storage_view.size); |
866 | |
867 | 0 | DeallocateIfAllocated(); |
868 | 0 | SetAllocation(std::move(allocation_tx).Release()); |
869 | 0 | SetIsAllocated(); |
870 | 0 | AddSize(1); |
871 | 0 | return *last_ptr; |
872 | 0 | } |
873 | | |
874 | | template <typename T, size_t N, typename A> |
875 | | auto Storage<T, N, A>::Erase(ConstIterator<A> from, |
876 | | ConstIterator<A> to) -> Iterator<A> { |
877 | | StorageView<A> storage_view = MakeStorageView(); |
878 | | |
879 | | auto erase_size = static_cast<SizeType<A>>(std::distance(from, to)); |
880 | | auto erase_index = static_cast<SizeType<A>>( |
881 | | std::distance(ConstIterator<A>(storage_view.data), from)); |
882 | | SizeType<A> erase_end_index = erase_index + erase_size; |
883 | | |
884 | | // Fast path: if the value type is trivially relocatable and we know |
885 | | // the allocator doesn't do anything fancy, then we know it is legal for us to |
886 | | // simply destroy the elements in the "erasure window" (which cannot throw) |
887 | | // and then memcpy downward to close the window. |
888 | | if (absl::is_trivially_relocatable<ValueType<A>>::value && |
889 | | std::is_nothrow_destructible<ValueType<A>>::value && |
890 | | std::is_same<A, std::allocator<ValueType<A>>>::value) { |
891 | | DestroyAdapter<A>::DestroyElements( |
892 | | GetAllocator(), storage_view.data + erase_index, erase_size); |
893 | | std::memmove( |
894 | | reinterpret_cast<char*>(storage_view.data + erase_index), |
895 | | reinterpret_cast<const char*>(storage_view.data + erase_end_index), |
896 | | (storage_view.size - erase_end_index) * sizeof(ValueType<A>)); |
897 | | } else { |
898 | | IteratorValueAdapter<A, MoveIterator<A>> move_values( |
899 | | MoveIterator<A>(storage_view.data + erase_end_index)); |
900 | | |
901 | | AssignElements<A>(storage_view.data + erase_index, move_values, |
902 | | storage_view.size - erase_end_index); |
903 | | |
904 | | DestroyAdapter<A>::DestroyElements( |
905 | | GetAllocator(), storage_view.data + (storage_view.size - erase_size), |
906 | | erase_size); |
907 | | } |
908 | | SubtractSize(erase_size); |
909 | | return Iterator<A>(storage_view.data + erase_index); |
910 | | } |
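The fast path depends on trivial relocatability: destroying the erased window and sliding the tail down with memmove is equivalent to move-assigning element by element and destroying the leftovers. A standalone sketch of the window-closing step on a plain int buffer:

#include <cstddef>
#include <cstring>

// Sketch of the "close the erasure window" fast path for a trivially
// relocatable element: destroying the window is a no-op for int, so erasing
// [2, 4) from a 6-element buffer is just a byte move of the tail.
int main() {
  int data[6] = {0, 1, 2, 3, 4, 5};
  std::size_t size = 6;
  const std::size_t erase_index = 2, erase_end_index = 4;

  std::memmove(data + erase_index, data + erase_end_index,
               (size - erase_end_index) * sizeof(int));
  size -= (erase_end_index - erase_index);

  // data now begins {0, 1, 4, 5} with size == 4.
  return (data[2] == 4 && size == 4) ? 0 : 1;
}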
911 | | |
912 | | template <typename T, size_t N, typename A> |
913 | | auto Storage<T, N, A>::Reserve(SizeType<A> requested_capacity) -> void { |
914 | | StorageView<A> storage_view = MakeStorageView(); |
915 | | |
916 | | if (ABSL_PREDICT_FALSE(requested_capacity <= storage_view.capacity)) return; |
917 | | |
918 | | AllocationTransaction<A> allocation_tx(GetAllocator()); |
919 | | |
920 | | IteratorValueAdapter<A, MoveIterator<A>> move_values( |
921 | | MoveIterator<A>(storage_view.data)); |
922 | | |
923 | | SizeType<A> new_requested_capacity = |
924 | | ComputeCapacity(storage_view.capacity, requested_capacity); |
925 | | Pointer<A> new_data = allocation_tx.Allocate(new_requested_capacity); |
926 | | |
927 | | ConstructElements<A>(GetAllocator(), new_data, move_values, |
928 | | storage_view.size); |
929 | | |
930 | | DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data, |
931 | | storage_view.size); |
932 | | |
933 | | DeallocateIfAllocated(); |
934 | | SetAllocation(std::move(allocation_tx).Release()); |
935 | | SetIsAllocated(); |
936 | | } |
937 | | |
938 | | template <typename T, size_t N, typename A> |
939 | | auto Storage<T, N, A>::ShrinkToFit() -> void { |
940 | | // May only be called on allocated instances! |
941 | | ABSL_HARDENING_ASSERT(GetIsAllocated()); |
942 | | |
943 | | StorageView<A> storage_view{GetAllocatedData(), GetSize(), |
944 | | GetAllocatedCapacity()}; |
945 | | |
946 | | if (ABSL_PREDICT_FALSE(storage_view.size == storage_view.capacity)) return; |
947 | | |
948 | | AllocationTransaction<A> allocation_tx(GetAllocator()); |
949 | | |
950 | | IteratorValueAdapter<A, MoveIterator<A>> move_values( |
951 | | MoveIterator<A>(storage_view.data)); |
952 | | |
953 | | Pointer<A> construct_data; |
954 | | if (storage_view.size > GetInlinedCapacity()) { |
955 | | SizeType<A> requested_capacity = storage_view.size; |
956 | | construct_data = allocation_tx.Allocate(requested_capacity); |
957 | | if (allocation_tx.GetCapacity() >= storage_view.capacity) { |
958 | | // Already using the smallest available heap allocation. |
959 | | return; |
960 | | } |
961 | | } else { |
962 | | construct_data = GetInlinedData(); |
963 | | } |
964 | | |
965 | | ABSL_INTERNAL_TRY { |
966 | | ConstructElements<A>(GetAllocator(), construct_data, move_values, |
967 | | storage_view.size); |
968 | | } |
969 | | ABSL_INTERNAL_CATCH_ANY { |
970 | | SetAllocation({storage_view.data, storage_view.capacity}); |
971 | | ABSL_INTERNAL_RETHROW; |
972 | | } |
973 | | |
974 | | DestroyAdapter<A>::DestroyElements(GetAllocator(), storage_view.data, |
975 | | storage_view.size); |
976 | | |
977 | | MallocAdapter<A>::Deallocate(GetAllocator(), storage_view.data, |
978 | | storage_view.capacity); |
979 | | |
980 | | if (allocation_tx.DidAllocate()) { |
981 | | SetAllocation(std::move(allocation_tx).Release()); |
982 | | } else { |
983 | | UnsetIsAllocated(); |
984 | | } |
985 | | } |
986 | | |
987 | | template <typename T, size_t N, typename A> |
988 | | auto Storage<T, N, A>::Swap(Storage* other_storage_ptr) -> void { |
989 | | using std::swap; |
990 | | ABSL_HARDENING_ASSERT(this != other_storage_ptr); |
991 | | |
992 | | if (GetIsAllocated() && other_storage_ptr->GetIsAllocated()) { |
993 | | swap(data_.allocated, other_storage_ptr->data_.allocated); |
994 | | } else if (!GetIsAllocated() && !other_storage_ptr->GetIsAllocated()) { |
995 | | SwapInlinedElements(SwapInlinedElementsPolicy{}, other_storage_ptr); |
996 | | } else { |
997 | | Storage* allocated_ptr = this; |
998 | | Storage* inlined_ptr = other_storage_ptr; |
999 | | if (!allocated_ptr->GetIsAllocated()) swap(allocated_ptr, inlined_ptr); |
1000 | | |
1001 | | StorageView<A> allocated_storage_view{ |
1002 | | allocated_ptr->GetAllocatedData(), allocated_ptr->GetSize(), |
1003 | | allocated_ptr->GetAllocatedCapacity()}; |
1004 | | |
1005 | | IteratorValueAdapter<A, MoveIterator<A>> move_values( |
1006 | | MoveIterator<A>(inlined_ptr->GetInlinedData())); |
1007 | | |
1008 | | ABSL_INTERNAL_TRY { |
1009 | | ConstructElements<A>(inlined_ptr->GetAllocator(), |
1010 | | allocated_ptr->GetInlinedData(), move_values, |
1011 | | inlined_ptr->GetSize()); |
1012 | | } |
1013 | | ABSL_INTERNAL_CATCH_ANY { |
1014 | | allocated_ptr->SetAllocation(Allocation<A>{ |
1015 | | allocated_storage_view.data, allocated_storage_view.capacity}); |
1016 | | ABSL_INTERNAL_RETHROW; |
1017 | | } |
1018 | | |
1019 | | DestroyAdapter<A>::DestroyElements(inlined_ptr->GetAllocator(), |
1020 | | inlined_ptr->GetInlinedData(), |
1021 | | inlined_ptr->GetSize()); |
1022 | | |
1023 | | inlined_ptr->SetAllocation(Allocation<A>{allocated_storage_view.data, |
1024 | | allocated_storage_view.capacity}); |
1025 | | } |
1026 | | |
1027 | | swap(GetSizeAndIsAllocated(), other_storage_ptr->GetSizeAndIsAllocated()); |
1028 | | swap(GetAllocator(), other_storage_ptr->GetAllocator()); |
1029 | | } |
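Swap therefore has three shapes: heap/heap exchanges just the {pointer, capacity} words, inline/inline swaps the elements (or raw bytes), and the mixed case moves the inlined elements across while the heap allocation changes owners. A usage-level sketch of the mixed case with the public absl::InlinedVector, assuming the Abseil headers are available:

#include <cassert>

#include "absl/container/inlined_vector.h"

// Usage sketch of the mixed case: one side is inlined, the other is
// heap-allocated, so the heap buffer changes owners and the two inlined
// elements are move-constructed into the other vector's inline storage.
int main() {
  absl::InlinedVector<int, 4> small = {1, 2};            // stored inline
  absl::InlinedVector<int, 4> large = {1, 2, 3, 4, 5};   // spilled to the heap

  small.swap(large);
  assert(small.size() == 5);
  assert(large.size() == 2);
  return 0;
}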
1030 | | |
1031 | | template <typename T, size_t N, typename A> |
1032 | | void Storage<T, N, A>::SwapN(ElementwiseSwapPolicy, Storage* other, |
1033 | | SizeType<A> n) { |
1034 | | std::swap_ranges(GetInlinedData(), GetInlinedData() + n, |
1035 | | other->GetInlinedData()); |
1036 | | } |
1037 | | |
1038 | | template <typename T, size_t N, typename A> |
1039 | | void Storage<T, N, A>::SwapN(ElementwiseConstructPolicy, Storage* other, |
1040 | | SizeType<A> n) { |
1041 | | Pointer<A> a = GetInlinedData(); |
1042 | | Pointer<A> b = other->GetInlinedData(); |
1043 | | // see note on allocators in `SwapInlinedElements`. |
1044 | | A& allocator_a = GetAllocator(); |
1045 | | A& allocator_b = other->GetAllocator(); |
1046 | | for (SizeType<A> i = 0; i < n; ++i, ++a, ++b) { |
1047 | | ValueType<A> tmp(std::move(*a)); |
1048 | | |
1049 | | AllocatorTraits<A>::destroy(allocator_a, a); |
1050 | | AllocatorTraits<A>::construct(allocator_b, a, std::move(*b)); |
1051 | | |
1052 | | AllocatorTraits<A>::destroy(allocator_b, b); |
1053 | | AllocatorTraits<A>::construct(allocator_a, b, std::move(tmp)); |
1054 | | } |
1055 | | } |
1056 | | |
1057 | | template <typename T, size_t N, typename A> |
1058 | | void Storage<T, N, A>::SwapInlinedElements(MemcpyPolicy, Storage* other) { |
1059 | | Data tmp = data_; |
1060 | | data_ = other->data_; |
1061 | | other->data_ = tmp; |
1062 | | } |
1063 | | |
1064 | | template <typename T, size_t N, typename A> |
1065 | | template <typename NotMemcpyPolicy> |
1066 | | void Storage<T, N, A>::SwapInlinedElements(NotMemcpyPolicy policy, |
1067 | | Storage* other) { |
1068 | | // Note: `destroy` needs to use the pre-swap allocator, while `construct` |
1069 | | // needs the post-swap allocator. The allocators themselves are swapped |
1070 | | // later, outside of `SwapInlinedElements`. |
1071 | | Storage* small_ptr = this; |
1072 | | Storage* large_ptr = other; |
1073 | | if (small_ptr->GetSize() > large_ptr->GetSize()) { |
1074 | | std::swap(small_ptr, large_ptr); |
1075 | | } |
1076 | | |
1077 | | auto small_size = small_ptr->GetSize(); |
1078 | | auto diff = large_ptr->GetSize() - small_size; |
1079 | | SwapN(policy, other, small_size); |
1080 | | |
1081 | | IteratorValueAdapter<A, MoveIterator<A>> move_values( |
1082 | | MoveIterator<A>(large_ptr->GetInlinedData() + small_size)); |
1083 | | |
1084 | | ConstructElements<A>(large_ptr->GetAllocator(), |
1085 | | small_ptr->GetInlinedData() + small_size, move_values, |
1086 | | diff); |
1087 | | |
1088 | | DestroyAdapter<A>::DestroyElements(large_ptr->GetAllocator(), |
1089 | | large_ptr->GetInlinedData() + small_size, |
1090 | | diff); |
1091 | | } |
1092 | | |
1093 | | // End ignore "array-bounds" |
1094 | | #if !defined(__clang__) && defined(__GNUC__) |
1095 | | #pragma GCC diagnostic pop |
1096 | | #endif |
1097 | | |
1098 | | } // namespace inlined_vector_internal |
1099 | | ABSL_NAMESPACE_END |
1100 | | } // namespace absl |
1101 | | |
1102 | | #endif // ABSL_CONTAINER_INTERNAL_INLINED_VECTOR_H_ |