/src/rapidjson/include/rapidjson/internal/stack.h
Line | Count | Source |
1 | | // Tencent is pleased to support the open source community by making RapidJSON available. |
2 | | // |
3 | | // Copyright (C) 2015 THL A29 Limited, a Tencent company, and Milo Yip. |
4 | | // |
5 | | // Licensed under the MIT License (the "License"); you may not use this file except |
6 | | // in compliance with the License. You may obtain a copy of the License at |
7 | | // |
8 | | // http://opensource.org/licenses/MIT |
9 | | // |
10 | | // Unless required by applicable law or agreed to in writing, software distributed |
11 | | // under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR |
12 | | // CONDITIONS OF ANY KIND, either express or implied. See the License for the |
13 | | // specific language governing permissions and limitations under the License. |
14 | | |
15 | | #ifndef RAPIDJSON_INTERNAL_STACK_H_ |
16 | | #define RAPIDJSON_INTERNAL_STACK_H_ |
17 | | |
18 | | #include "../allocators.h" |
19 | | #include "swap.h" |
20 | | #include <cstddef> |
21 | | |
22 | | #if defined(__clang__) |
23 | | RAPIDJSON_DIAG_PUSH |
24 | | RAPIDJSON_DIAG_OFF(c++98-compat) |
25 | | #endif |
26 | | |
27 | | RAPIDJSON_NAMESPACE_BEGIN |
28 | | namespace internal { |
29 | | |
30 | | /////////////////////////////////////////////////////////////////////////////// |
31 | | // Stack |
32 | | |
33 | | //! A type-unsafe stack for storing different types of data. |
34 | | /*! \tparam Allocator Allocator for allocating stack memory. |
35 | | */ |
36 | | template <typename Allocator> |
37 | | class Stack { |
38 | | public: |
39 | | // Optimization note: Do not allocate memory for stack_ in constructor. |
40 | | // Do it lazily when first Push() -> Expand() -> Resize(). |
41 | 96 | Stack(Allocator* allocator, size_t stackCapacity) : allocator_(allocator), ownAllocator_(0), stack_(0), stackTop_(0), stackEnd_(0), initialCapacity_(stackCapacity) { |
42 | 96 | } |
43 | | |
44 | | #if RAPIDJSON_HAS_CXX11_RVALUE_REFS |
45 | | Stack(Stack&& rhs) |
46 | | : allocator_(rhs.allocator_), |
47 | | ownAllocator_(rhs.ownAllocator_), |
48 | | stack_(rhs.stack_), |
49 | | stackTop_(rhs.stackTop_), |
50 | | stackEnd_(rhs.stackEnd_), |
51 | | initialCapacity_(rhs.initialCapacity_) |
52 | | { |
53 | | rhs.allocator_ = 0; |
54 | | rhs.ownAllocator_ = 0; |
55 | | rhs.stack_ = 0; |
56 | | rhs.stackTop_ = 0; |
57 | | rhs.stackEnd_ = 0; |
58 | | rhs.initialCapacity_ = 0; |
59 | | } |
60 | | #endif |
61 | | |
62 | 96 | ~Stack() { |
63 | 96 | Destroy(); |
64 | 96 | } |
65 | | |
66 | | #if RAPIDJSON_HAS_CXX11_RVALUE_REFS |
67 | | Stack& operator=(Stack&& rhs) { |
68 | | if (&rhs != this) |
69 | | { |
70 | | Destroy(); |
71 | | |
72 | | allocator_ = rhs.allocator_; |
73 | | ownAllocator_ = rhs.ownAllocator_; |
74 | | stack_ = rhs.stack_; |
75 | | stackTop_ = rhs.stackTop_; |
76 | | stackEnd_ = rhs.stackEnd_; |
77 | | initialCapacity_ = rhs.initialCapacity_; |
78 | | |
79 | | rhs.allocator_ = 0; |
80 | | rhs.ownAllocator_ = 0; |
81 | | rhs.stack_ = 0; |
82 | | rhs.stackTop_ = 0; |
83 | | rhs.stackEnd_ = 0; |
84 | | rhs.initialCapacity_ = 0; |
85 | | } |
86 | | return *this; |
87 | | } |
88 | | #endif |
89 | | |
90 | | void Swap(Stack& rhs) RAPIDJSON_NOEXCEPT { |
91 | | internal::Swap(allocator_, rhs.allocator_); |
92 | | internal::Swap(ownAllocator_, rhs.ownAllocator_); |
93 | | internal::Swap(stack_, rhs.stack_); |
94 | | internal::Swap(stackTop_, rhs.stackTop_); |
95 | | internal::Swap(stackEnd_, rhs.stackEnd_); |
96 | | internal::Swap(initialCapacity_, rhs.initialCapacity_); |
97 | | } |
98 | | |
99 | 88 | void Clear() { stackTop_ = stack_; } |
100 | | |
101 | 44 | void ShrinkToFit() { |
102 | 44 | if (Empty()) { |
103 | | // If the stack is empty, completely deallocate the memory. |
104 | 44 | Allocator::Free(stack_); // NOLINT (+clang-analyzer-unix.Malloc) |
105 | 44 | stack_ = 0; |
106 | 44 | stackTop_ = 0; |
107 | 44 | stackEnd_ = 0; |
108 | 44 | } |
109 | 0 | else |
110 | 0 | Resize(GetSize()); |
111 | 44 | } |
112 | | |
113 | | // Optimization note: try to minimize the size of this function for force inline. |
114 | | // Expansion is run very infrequently, so it is moved to another (probably non-inline) function. |
115 | | template<typename T> |
116 | 11.8M | RAPIDJSON_FORCEINLINE void Reserve(size_t count = 1) { |
117 | | // Expand the stack if needed |
118 | 11.8M | if (RAPIDJSON_UNLIKELY(static_cast<std::ptrdiff_t>(sizeof(T) * count) > (stackEnd_ - stackTop_))) |
119 | 250 | Expand<T>(count); |
120 | 11.8M | }
        Per-instantiation counts for Reserve<T> (Stack<CrtAllocator>):
          Reserve<GenericValue<UTF8<char>, MemoryPoolAllocator<CrtAllocator>>>(unsigned long): 1.42M calls, 104 reaching Expand
          Reserve<char>(unsigned long): 10.4M calls, 146 reaching Expand
          Unexecuted instantiations: Reserve<Writer<GenericStringBuffer<UTF8<char>, CrtAllocator>, UTF8<char>, UTF8<char>, CrtAllocator, 0u>::Level>(unsigned long), Reserve<unsigned int>(unsigned long)
121 | | |
122 | | template<typename T> |
123 | 11.8M | RAPIDJSON_FORCEINLINE T* Push(size_t count = 1) { |
124 | 11.8M | Reserve<T>(count); |
125 | 11.8M | return PushUnsafe<T>(count); |
126 | 11.8M | }
        Per-instantiation counts for Push<T> (Stack<CrtAllocator>):
          Push<GenericValue<UTF8<char>, MemoryPoolAllocator<CrtAllocator>>>(unsigned long): 1.42M calls
          Push<char>(unsigned long): 10.4M calls
          Unexecuted instantiations: Push<Writer<GenericStringBuffer<UTF8<char>, CrtAllocator>, UTF8<char>, UTF8<char>, CrtAllocator, 0u>::Level>(unsigned long), Push<unsigned int>(unsigned long)
127 | | |
128 | | template<typename T> |
129 | 12.9M | RAPIDJSON_FORCEINLINE T* PushUnsafe(size_t count = 1) { |
130 | 12.9M | RAPIDJSON_ASSERT(stackTop_); |
131 | 12.9M | RAPIDJSON_ASSERT(static_cast<std::ptrdiff_t>(sizeof(T) * count) <= (stackEnd_ - stackTop_)); |
132 | 12.9M | T* ret = reinterpret_cast<T*>(stackTop_); |
133 | 12.9M | stackTop_ += sizeof(T) * count; |
134 | 12.9M | return ret; |
135 | 12.9M | }
        Per-instantiation counts for PushUnsafe<T> (Stack<CrtAllocator>):
          PushUnsafe<GenericValue<UTF8<char>, MemoryPoolAllocator<CrtAllocator>>>(unsigned long): 1.42M calls
          PushUnsafe<char>(unsigned long): 11.5M calls
          Unexecuted instantiations: PushUnsafe<Writer<GenericStringBuffer<UTF8<char>, CrtAllocator>, UTF8<char>, UTF8<char>, CrtAllocator, 0u>::Level>(unsigned long), PushUnsafe<unsigned int>(unsigned long)
136 | | |
137 | | template<typename T> |
138 | 1.07M | T* Pop(size_t count) { |
139 | 1.07M | RAPIDJSON_ASSERT(GetSize() >= count * sizeof(T)); |
140 | 1.07M | stackTop_ -= count * sizeof(T); |
141 | 1.07M | return reinterpret_cast<T*>(stackTop_); |
142 | 1.07M | }
        Per-instantiation counts for Pop<T> (Stack<CrtAllocator>):
          Pop<char>(unsigned long): 1.06M calls
          Pop<GenericMember<UTF8<char>, MemoryPoolAllocator<CrtAllocator>>>(unsigned long): 4 calls
          Pop<GenericValue<UTF8<char>, MemoryPoolAllocator<CrtAllocator>>>(unsigned long): 8.45k calls
          Unexecuted instantiations: Pop<Writer<GenericStringBuffer<UTF8<char>, CrtAllocator>, UTF8<char>, UTF8<char>, CrtAllocator, 0u>::Level>(unsigned long), Pop<unsigned int>(unsigned long)
143 | | |
144 | | template<typename T> |
145 | 8.45k | T* Top() { |
146 | 8.45k | RAPIDJSON_ASSERT(GetSize() >= sizeof(T)); |
147 | 8.45k | return reinterpret_cast<T*>(stackTop_ - sizeof(T)); |
148 | 8.45k | }
        Per-instantiation counts for Top<T> (Stack<CrtAllocator>):
          Top<GenericValue<UTF8<char>, MemoryPoolAllocator<CrtAllocator>>>(): 8.45k calls
          Unexecuted instantiations: Top<Writer<GenericStringBuffer<UTF8<char>, CrtAllocator>, UTF8<char>, UTF8<char>, CrtAllocator, 0u>::Level>(), Top<unsigned int>()
149 | | |
150 | | template<typename T> |
151 | | const T* Top() const { |
152 | | RAPIDJSON_ASSERT(GetSize() >= sizeof(T)); |
153 | | return reinterpret_cast<T*>(stackTop_ - sizeof(T)); |
154 | | } |
155 | | |
156 | | template<typename T> |
157 | | T* End() { return reinterpret_cast<T*>(stackTop_); } |
158 | | |
159 | | template<typename T> |
160 | | const T* End() const { return reinterpret_cast<T*>(stackTop_); } |
161 | | |
162 | | template<typename T> |
163 | 4 | T* Bottom() { return reinterpret_cast<T*>(stack_); } |
164 | | |
165 | | template<typename T> |
166 | | const T* Bottom() const { return reinterpret_cast<T*>(stack_); } |
167 | | |
168 | 44 | bool HasAllocator() const { |
169 | 44 | return allocator_ != 0; |
170 | 44 | } |
171 | | |
172 | 0 | Allocator& GetAllocator() { |
173 | 0 | RAPIDJSON_ASSERT(allocator_); |
174 | 0 | return *allocator_; |
175 | 0 | } |
176 | | |
177 | 48 | bool Empty() const { return stackTop_ == stack_; } |
178 | 1.07M | size_t GetSize() const { return static_cast<size_t>(stackTop_ - stack_); } |
179 | 470 | size_t GetCapacity() const { return static_cast<size_t>(stackEnd_ - stack_); } |
180 | | |
181 | | private: |
182 | | template<typename T> |
183 | 250 | void Expand(size_t count) { |
184 | | // Only expand the capacity if the current stack exists. Otherwise just create a stack with initial capacity. |
185 | 250 | size_t newCapacity; |
186 | 250 | if (stack_ == 0) { |
187 | 30 | if (!allocator_) |
188 | 30 | ownAllocator_ = allocator_ = RAPIDJSON_NEW(Allocator)(); |
189 | 30 | newCapacity = initialCapacity_; |
190 | 220 | } else { |
191 | 220 | newCapacity = GetCapacity(); |
192 | 220 | newCapacity += (newCapacity + 1) / 2; |
193 | 220 | } |
194 | 250 | size_t newSize = GetSize() + sizeof(T) * count; |
195 | 250 | if (newCapacity < newSize) |
196 | 1 | newCapacity = newSize; |
197 | | |
198 | 250 | Resize(newCapacity); |
199 | 250 | }
        Per-instantiation counts for Expand<T> (Stack<CrtAllocator>):
          Expand<GenericValue<UTF8<char>, MemoryPoolAllocator<CrtAllocator>>>(unsigned long): 104 calls (16 first allocations, 88 regrowths, 0 clamps to newSize)
          Expand<char>(unsigned long): 146 calls (14 first allocations, 132 regrowths, 1 clamp to newSize)
          Unexecuted instantiations: Expand<Writer<GenericStringBuffer<UTF8<char>, CrtAllocator>, UTF8<char>, UTF8<char>, CrtAllocator, 0u>::Level>(unsigned long), Expand<unsigned int>(unsigned long)
200 | | |
201 | 250 | void Resize(size_t newCapacity) { |
202 | 250 | const size_t size = GetSize(); // Backup the current size |
203 | 250 | stack_ = static_cast<char*>(allocator_->Realloc(stack_, GetCapacity(), newCapacity)); |
204 | 250 | stackTop_ = stack_ + size; |
205 | 250 | stackEnd_ = stack_ + newCapacity; |
206 | 250 | } |
207 | | |
208 | 96 | void Destroy() { |
209 | 96 | Allocator::Free(stack_); |
210 | 96 | RAPIDJSON_DELETE(ownAllocator_); // Only delete if it is owned by the stack |
211 | 96 | } |
212 | | |
213 | | // Prohibit copy constructor & assignment operator. |
214 | | Stack(const Stack&); |
215 | | Stack& operator=(const Stack&); |
216 | | |
217 | | Allocator* allocator_; |
218 | | Allocator* ownAllocator_; |
219 | | char *stack_; |
220 | | char *stackTop_; |
221 | | char *stackEnd_; |
222 | | size_t initialCapacity_; |
223 | | }; |
224 | | |
225 | | } // namespace internal |
226 | | RAPIDJSON_NAMESPACE_END |
227 | | |
228 | | #if defined(__clang__) |
229 | | RAPIDJSON_DIAG_POP |
230 | | #endif |
231 | | |
232 | | #endif // RAPIDJSON_INTERNAL_STACK_H_
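
The listing above documents RapidJSON's internal type-unsafe byte stack: the buffer is allocated lazily on the first Push(), callers must pop exactly the types and counts they pushed, and Expand() grows capacity by roughly 1.5x. Below is a minimal usage sketch built only from the members shown in the listing (Push, Pop, GetSize, ShrinkToFit); the main() harness, include paths, and pushed values are illustrative assumptions, not code from the report.

    // Minimal usage sketch, assuming the RapidJSON headers are on the include path.
    #include "rapidjson/allocators.h"
    #include "rapidjson/internal/stack.h"
    #include <cstdio>
    #include <cstring>

    int main() {
        rapidjson::CrtAllocator allocator;

        // No buffer is allocated here; allocation happens lazily on the first Push().
        rapidjson::internal::Stack<rapidjson::CrtAllocator> stack(&allocator, 256);

        // Type-unsafe contract: pop exactly what was pushed, in reverse order.
        *stack.Push<int>() = 42;            // reserve one int slot and fill it
        char* text = stack.Push<char>(6);   // reserve six raw bytes
        std::memcpy(text, "hello", 6);      // five characters plus the terminator

        std::printf("bytes used: %zu\n", stack.GetSize());

        const char* popped = stack.Pop<char>(6);  // memory stays valid until the next resize
        int value = *stack.Pop<int>(1);
        std::printf("popped: %s %d\n", popped, value);

        stack.ShrinkToFit();  // the stack is empty again, so the buffer is freed entirely
        return 0;
    }

The growth policy in Expand() (newCapacity += (newCapacity + 1) / 2) is what keeps the expansion counts in the report small relative to the push counts: roughly 250 expansions serve about 11.8M reserves, because each expansion adds half of the current capacity.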