Coverage Report

Created: 2023-02-22 06:51

/src/hermes/external/llvh/lib/Support/SmallPtrSet.cpp
Line
Count
Source (jump to first uncovered line)
1
//===- llvm/ADT/SmallPtrSet.cpp - 'Normally small' pointer set ------------===//
2
//
3
//                     The LLVM Compiler Infrastructure
4
//
5
// This file is distributed under the University of Illinois Open Source
6
// License. See LICENSE.TXT for details.
7
//
8
//===----------------------------------------------------------------------===//
9
//
10
// This file implements the SmallPtrSet class.  See SmallPtrSet.h for an
11
// overview of the algorithm.
12
//
13
//===----------------------------------------------------------------------===//
14
15
#include "llvh/ADT/SmallPtrSet.h"
16
#include "llvh/ADT/DenseMapInfo.h"
17
#include "llvh/Support/MathExtras.h"
18
#include "llvh/Support/ErrorHandling.h"
19
#include <algorithm>
20
#include <cassert>
21
#include <cstdlib>
22
23
using namespace llvh;
24
25
0
void SmallPtrSetImplBase::shrink_and_clear() {
26
0
  assert(!isSmall() && "Can't shrink a small set!");
27
0
  free(CurArray);
28
29
  // Reduce the number of buckets.
30
0
  unsigned Size = size();
31
0
  CurArraySize = Size > 16 ? 1 << (Log2_32_Ceil(Size) + 1) : 32;
32
0
  NumNonEmpty = NumTombstones = 0;
33
34
  // Install the new array.  Clear all the buckets to empty.
35
0
  CurArray = (const void**)safe_malloc(sizeof(void*) * CurArraySize);
36
37
0
  memset(CurArray, -1, CurArraySize*sizeof(void*));
38
0
}
39
40
std::pair<const void *const *, bool>
41
157k
SmallPtrSetImplBase::insert_imp_big(const void *Ptr) {
42
157k
  if (LLVM_UNLIKELY(size() * 4 >= CurArraySize * 3)) {
43
    // If more than 3/4 of the array is full, grow.
44
80
    Grow(CurArraySize < 64 ? 128 : CurArraySize * 2);
45
157k
  } else if (LLVM_UNLIKELY(CurArraySize - NumNonEmpty < CurArraySize / 8)) {
46
    // If fewer of 1/8 of the array is empty (meaning that many are filled with
47
    // tombstones), rehash.
48
0
    Grow(CurArraySize);
49
0
  }
50
51
  // Okay, we know we have space.  Find a hash bucket.
52
157k
  const void **Bucket = const_cast<const void**>(FindBucketFor(Ptr));
53
157k
  if (*Bucket == Ptr)
54
264
    return std::make_pair(Bucket, false); // Already inserted, good.
55
56
  // Otherwise, insert it!
57
157k
  if (*Bucket == getTombstoneMarker())
58
0
    --NumTombstones;
59
157k
  else
60
157k
    ++NumNonEmpty; // Track density.
61
157k
  *Bucket = Ptr;
62
157k
  incrementEpoch();
63
157k
  return std::make_pair(Bucket, true);
64
157k
}
65
66
393k
const void * const *SmallPtrSetImplBase::FindBucketFor(const void *Ptr) const {
67
393k
  unsigned Bucket = DenseMapInfo<void *>::getHashValue(Ptr) & (CurArraySize-1);
68
393k
  unsigned ArraySize = CurArraySize;
69
393k
  unsigned ProbeAmt = 1;
70
393k
  const void *const *Array = CurArray;
71
393k
  const void *const *Tombstone = nullptr;
72
708k
  while (true) {
73
    // If we found an empty bucket, the pointer doesn't exist in the set.
74
    // Return a tombstone if we've seen one so far, or the empty bucket if
75
    // not.
76
708k
    if (LLVM_LIKELY(Array[Bucket] == getEmptyMarker()))
77
353k
      return Tombstone ? Tombstone : Array+Bucket;
78
79
    // Found Ptr's bucket?
80
355k
    if (LLVM_LIKELY(Array[Bucket] == Ptr))
81
39.5k
      return Array+Bucket;
82
83
    // If this is a tombstone, remember it.  If Ptr ends up not in the set, we
84
    // prefer to return it than something that would require more probing.
85
315k
    if (Array[Bucket] == getTombstoneMarker() && !Tombstone)
86
0
      Tombstone = Array+Bucket;  // Remember the first tombstone found.
87
88
    // It's a hash collision or a tombstone. Reprobe.
89
315k
    Bucket = (Bucket + ProbeAmt++) & (ArraySize-1);
90
315k
  }
91
393k
}
92
93
/// Grow - Allocate a larger backing store for the buckets and move it over.
94
///
95
80
void SmallPtrSetImplBase::Grow(unsigned NewSize) {
96
80
  const void **OldBuckets = CurArray;
97
80
  const void **OldEnd = EndPointer();
98
80
  bool WasSmall = isSmall();
99
100
  // Install the new array.  Clear all the buckets to empty.
101
80
  const void **NewBuckets = (const void**) safe_malloc(sizeof(void*) * NewSize);
102
103
  // Reset member only if memory was allocated successfully
104
80
  CurArray = NewBuckets;
105
80
  CurArraySize = NewSize;
106
80
  memset(CurArray, -1, NewSize*sizeof(void*));
107
108
  // Copy over all valid entries.
109
261k
  for (const void **BucketPtr = OldBuckets; BucketPtr != OldEnd; ++BucketPtr) {
110
    // Copy over the element if it is valid.
111
261k
    const void *Elt = *BucketPtr;
112
261k
    if (Elt != getTombstoneMarker() && Elt != getEmptyMarker())
113
196k
      *const_cast<void**>(FindBucketFor(Elt)) = const_cast<void*>(Elt);
114
261k
  }
115
116
80
  if (!WasSmall)
117
64
    free(OldBuckets);
118
80
  NumNonEmpty -= NumTombstones;
119
80
  NumTombstones = 0;
120
80
}
121
122
SmallPtrSetImplBase::SmallPtrSetImplBase(const void **SmallStorage,
123
0
                                         const SmallPtrSetImplBase &that) {
124
0
  SmallArray = SmallStorage;
125
126
  // If we're becoming small, prepare to insert into our stack space
127
0
  if (that.isSmall()) {
128
0
    CurArray = SmallArray;
129
  // Otherwise, allocate new heap space (unless we were the same size)
130
0
  } else {
131
0
    CurArray = (const void**)safe_malloc(sizeof(void*) * that.CurArraySize);
132
0
  }
133
134
  // Copy over the that array.
135
0
  CopyHelper(that);
136
0
}
137
138
SmallPtrSetImplBase::SmallPtrSetImplBase(const void **SmallStorage,
139
                                         unsigned SmallSize,
140
14
                                         SmallPtrSetImplBase &&that) {
141
14
  SmallArray = SmallStorage;
142
14
  MoveHelper(SmallSize, std::move(that));
143
14
}
144
145
0
void SmallPtrSetImplBase::CopyFrom(const SmallPtrSetImplBase &RHS) {
146
0
  assert(&RHS != this && "Self-copy should be handled by the caller.");
147
148
0
  if (isSmall() && RHS.isSmall())
149
0
    assert(CurArraySize == RHS.CurArraySize &&
150
0
           "Cannot assign sets with different small sizes");
151
152
  // If we're becoming small, prepare to insert into our stack space
153
0
  if (RHS.isSmall()) {
154
0
    if (!isSmall())
155
0
      free(CurArray);
156
0
    CurArray = SmallArray;
157
  // Otherwise, allocate new heap space (unless we were the same size)
158
0
  } else if (CurArraySize != RHS.CurArraySize) {
159
0
    if (isSmall())
160
0
      CurArray = (const void**)safe_malloc(sizeof(void*) * RHS.CurArraySize);
161
0
    else {
162
0
      const void **T = (const void**)safe_realloc(CurArray,
163
0
                                             sizeof(void*) * RHS.CurArraySize);
164
0
      CurArray = T;
165
0
    }
166
0
  }
167
168
0
  CopyHelper(RHS);
169
0
}
170
171
0
void SmallPtrSetImplBase::CopyHelper(const SmallPtrSetImplBase &RHS) {
172
  // Copy over the new array size
173
0
  CurArraySize = RHS.CurArraySize;
174
175
  // Copy over the contents from the other set
176
0
  std::copy(RHS.CurArray, RHS.EndPointer(), CurArray);
177
178
0
  NumNonEmpty = RHS.NumNonEmpty;
179
0
  NumTombstones = RHS.NumTombstones;
180
0
}
181
182
void SmallPtrSetImplBase::MoveFrom(unsigned SmallSize,
183
0
                                   SmallPtrSetImplBase &&RHS) {
184
0
  if (!isSmall())
185
0
    free(CurArray);
186
0
  MoveHelper(SmallSize, std::move(RHS));
187
0
}
188
189
void SmallPtrSetImplBase::MoveHelper(unsigned SmallSize,
190
14
                                     SmallPtrSetImplBase &&RHS) {
191
14
  assert(&RHS != this && "Self-move should be handled by the caller.");
192
193
14
  if (RHS.isSmall()) {
194
    // Copy a small RHS rather than moving.
195
14
    CurArray = SmallArray;
196
14
    std::copy(RHS.CurArray, RHS.CurArray + RHS.NumNonEmpty, CurArray);
197
14
  } else {
198
0
    CurArray = RHS.CurArray;
199
0
    RHS.CurArray = RHS.SmallArray;
200
0
  }
201
202
  // Copy the rest of the trivial members.
203
14
  CurArraySize = RHS.CurArraySize;
204
14
  NumNonEmpty = RHS.NumNonEmpty;
205
14
  NumTombstones = RHS.NumTombstones;
206
207
  // Make the RHS small and empty.
208
14
  RHS.CurArraySize = SmallSize;
209
14
  assert(RHS.CurArray == RHS.SmallArray);
210
0
  RHS.NumNonEmpty = 0;
211
14
  RHS.NumTombstones = 0;
212
14
}
213
214
0
void SmallPtrSetImplBase::swap(SmallPtrSetImplBase &RHS) {
215
0
  if (this == &RHS) return;
216
217
  // We can only avoid copying elements if neither set is small.
218
0
  if (!this->isSmall() && !RHS.isSmall()) {
219
0
    std::swap(this->CurArray, RHS.CurArray);
220
0
    std::swap(this->CurArraySize, RHS.CurArraySize);
221
0
    std::swap(this->NumNonEmpty, RHS.NumNonEmpty);
222
0
    std::swap(this->NumTombstones, RHS.NumTombstones);
223
0
    return;
224
0
  }
225
226
  // FIXME: From here on we assume that both sets have the same small size.
227
228
  // If only RHS is small, copy the small elements into LHS and move the pointer
229
  // from LHS to RHS.
230
0
  if (!this->isSmall() && RHS.isSmall()) {
231
0
    assert(RHS.CurArray == RHS.SmallArray);
232
0
    std::copy(RHS.CurArray, RHS.CurArray + RHS.NumNonEmpty, this->SmallArray);
233
0
    std::swap(RHS.CurArraySize, this->CurArraySize);
234
0
    std::swap(this->NumNonEmpty, RHS.NumNonEmpty);
235
0
    std::swap(this->NumTombstones, RHS.NumTombstones);
236
0
    RHS.CurArray = this->CurArray;
237
0
    this->CurArray = this->SmallArray;
238
0
    return;
239
0
  }
240
241
  // If only LHS is small, copy the small elements into RHS and move the pointer
242
  // from RHS to LHS.
243
0
  if (this->isSmall() && !RHS.isSmall()) {
244
0
    assert(this->CurArray == this->SmallArray);
245
0
    std::copy(this->CurArray, this->CurArray + this->NumNonEmpty,
246
0
              RHS.SmallArray);
247
0
    std::swap(RHS.CurArraySize, this->CurArraySize);
248
0
    std::swap(RHS.NumNonEmpty, this->NumNonEmpty);
249
0
    std::swap(RHS.NumTombstones, this->NumTombstones);
250
0
    this->CurArray = RHS.CurArray;
251
0
    RHS.CurArray = RHS.SmallArray;
252
0
    return;
253
0
  }
254
255
  // Both a small, just swap the small elements.
256
0
  assert(this->isSmall() && RHS.isSmall());
257
0
  unsigned MinNonEmpty = std::min(this->NumNonEmpty, RHS.NumNonEmpty);
258
0
  std::swap_ranges(this->SmallArray, this->SmallArray + MinNonEmpty,
259
0
                   RHS.SmallArray);
260
0
  if (this->NumNonEmpty > MinNonEmpty) {
261
0
    std::copy(this->SmallArray + MinNonEmpty,
262
0
              this->SmallArray + this->NumNonEmpty,
263
0
              RHS.SmallArray + MinNonEmpty);
264
0
  } else {
265
0
    std::copy(RHS.SmallArray + MinNonEmpty, RHS.SmallArray + RHS.NumNonEmpty,
266
0
              this->SmallArray + MinNonEmpty);
267
0
  }
268
0
  assert(this->CurArraySize == RHS.CurArraySize);
269
0
  std::swap(this->NumNonEmpty, RHS.NumNonEmpty);
270
0
  std::swap(this->NumTombstones, RHS.NumTombstones);
271
0
}