//===- llvm/ADT/SmallPtrSet.cpp - 'Normally small' pointer set -----------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the SmallPtrSet class. See SmallPtrSet.h for an
// overview of the algorithm.
//
//===----------------------------------------------------------------------===//
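
// A minimal usage sketch (illustrative only; the real interface is declared
// in SmallPtrSet.h):
//
//   llvm::SmallPtrSet<void *, 8> Visited;  // 8 inline slots before any heap
//                                          // allocation is needed.
//   if (Visited.insert(Ptr).second) {
//     // Ptr was not present; this insertion added it.
//   }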

#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/DenseMapInfo.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/MemAlloc.h"
#include <algorithm>
#include <cassert>
#include <cstdlib>

using namespace llvm;

void SmallPtrSetImplBase::shrink_and_clear() {
  assert(!isSmall() && "Can't shrink a small set!");
  free(CurArray);

  // Reduce the number of buckets.
  unsigned Size = size();
  CurArraySize = Size > 16 ? 1 << (Log2_32_Ceil(Size) + 1) : 32;
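  // Worked example (illustrative numbers): with Size == 100 live entries,
  // Log2_32_Ceil(100) == 7, so the shrunken table gets 1 << 8 == 256 buckets;
  // a set at or below 16 entries drops to the 32-bucket minimum.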
  NumEntries = NumTombstones = 0;

  // Install the new array. Clear all the buckets to empty.
  CurArray = (const void**)safe_malloc(sizeof(void*) * CurArraySize);

  memset(CurArray, -1, CurArraySize*sizeof(void*));
}

std::pair<const void *const *, bool>
SmallPtrSetImplBase::insert_imp_big(const void *Ptr) {
  if (LLVM_UNLIKELY(size() * 4 >= CurArraySize * 3)) {
    // If more than 3/4 of the array is full, grow.
    Grow(CurArraySize < 64 ? 128 : CurArraySize * 2);
  } else if (LLVM_UNLIKELY(CurArraySize - NumEntries - NumTombstones <
                           CurArraySize / 8)) {
    // If fewer than 1/8 of the buckets are empty (meaning that many are
    // filled with tombstones), rehash.
    Grow(CurArraySize);
  }
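  // Illustrative numbers: with CurArraySize == 128, the first branch fires at
  // size() == 96 (3/4 full) and doubles the table to 256 buckets; the second
  // fires when fewer than 16 buckets (1/8) remain empty, rehashing at the
  // same size to turn tombstones back into empty buckets.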

  // Okay, we know we have space. Find a hash bucket.
  const void **Bucket = const_cast<const void**>(FindBucketFor(Ptr));
  if (*Bucket == Ptr)
    return {Bucket, false}; // Already inserted, good.

  // Otherwise, insert it!
  if (*Bucket == getTombstoneMarker())
    --NumTombstones;
  ++NumEntries;
  *Bucket = Ptr;
  incrementEpoch();
  return {Bucket, true};
}

const void *const *SmallPtrSetImplBase::doFind(const void *Ptr) const {
  unsigned BucketNo =
      DenseMapInfo<void *>::getHashValue(Ptr) & (CurArraySize - 1);
  unsigned ProbeAmt = 1;
  while (true) {
    const void *const *Bucket = CurArray + BucketNo;
    if (LLVM_LIKELY(*Bucket == Ptr))
      return Bucket;
    if (LLVM_LIKELY(*Bucket == getEmptyMarker()))
      return nullptr;

    // Otherwise, it's a hash collision or a tombstone, continue quadratic
    // probing.
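    // The offsets 1, 2, 3, ... walk buckets h, h+1, h+3, h+6, ... (the
    // triangular numbers modulo the power-of-two table size), a sequence that
    // visits every bucket, so the loop terminates as long as at least one
    // bucket is empty.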
    BucketNo += ProbeAmt++;
    BucketNo &= CurArraySize - 1;
  }
}

const void *const *SmallPtrSetImplBase::FindBucketFor(const void *Ptr) const {
  unsigned Bucket = DenseMapInfo<void *>::getHashValue(Ptr) & (CurArraySize-1);
  unsigned ArraySize = CurArraySize;
  unsigned ProbeAmt = 1;
  const void *const *Array = CurArray;
  const void *const *Tombstone = nullptr;
  while (true) {
    // If we found an empty bucket, the pointer doesn't exist in the set.
    // Return a tombstone if we've seen one so far, or the empty bucket if
    // not.
    if (LLVM_LIKELY(Array[Bucket] == getEmptyMarker()))
      return Tombstone ? Tombstone : Array+Bucket;

    // Found Ptr's bucket?
    if (LLVM_LIKELY(Array[Bucket] == Ptr))
      return Array+Bucket;

    // If this is a tombstone, remember it. If Ptr ends up not in the set, we
    // prefer to return it rather than a bucket that would require more
    // probing.
    if (Array[Bucket] == getTombstoneMarker() && !Tombstone)
      Tombstone = Array+Bucket; // Remember the first tombstone found.
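    // Illustrative probe chain: with slots T, X, E (tombstone, other key,
    // empty) and Ptr absent, the loop returns the T slot when it reaches E,
    // so a later insert reuses the earliest dead slot on the chain.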

    // It's a hash collision or a tombstone. Reprobe.
    Bucket = (Bucket + ProbeAmt++) & (ArraySize-1);
  }
}

/// Grow - Allocate a larger backing store for the buckets and move the
/// existing elements over to it.
void SmallPtrSetImplBase::Grow(unsigned NewSize) {
  auto OldBuckets = buckets();
  bool WasSmall = isSmall();

  // Install the new array. Clear all the buckets to empty.
  const void **NewBuckets = (const void**)safe_malloc(sizeof(void*) * NewSize);

  // Reset members only after memory was allocated successfully.
  CurArray = NewBuckets;
  CurArraySize = NewSize;
  memset(CurArray, -1, NewSize*sizeof(void*));

  // Copy over all valid entries.
  for (const void *&Bucket : OldBuckets) {
    // Copy over the element if it is valid.
    if (Bucket != getTombstoneMarker() && Bucket != getEmptyMarker())
      *const_cast<void **>(FindBucketFor(Bucket)) = const_cast<void *>(Bucket);
  }

  if (!WasSmall)
    free(OldBuckets.begin());
  NumTombstones = 0;
  IsSmall = false;
}

SmallPtrSetImplBase::SmallPtrSetImplBase(const void **SmallStorage,
                                         const SmallPtrSetImplBase &that) {
  IsSmall = that.isSmall();
  if (IsSmall) {
    // If 'that' is small, copy into our inline (stack) storage.
    CurArray = SmallStorage;
  } else {
    // Otherwise, allocate heap space to match 'that'.
    CurArray = (const void**)safe_malloc(sizeof(void*) * that.CurArraySize);
  }

  // Copy over that set's array.
  copyHelper(that);
}

SmallPtrSetImplBase::SmallPtrSetImplBase(const void **SmallStorage,
                                         unsigned SmallSize,
                                         const void **RHSSmallStorage,
                                         SmallPtrSetImplBase &&that) {
  moveHelper(SmallStorage, SmallSize, RHSSmallStorage, std::move(that));
}

void SmallPtrSetImplBase::copyFrom(const void **SmallStorage,
                                   const SmallPtrSetImplBase &RHS) {
  assert(&RHS != this && "Self-copy should be handled by the caller.");

  if (isSmall() && RHS.isSmall())
    assert(CurArraySize == RHS.CurArraySize &&
           "Cannot assign sets with different small sizes");

  // If we're becoming small, prepare to insert into our stack space.
  if (RHS.isSmall()) {
    if (!isSmall())
      free(CurArray);
    CurArray = SmallStorage;
    IsSmall = true;
    // Otherwise, allocate new heap space (unless we were the same size).
  } else if (CurArraySize != RHS.CurArraySize) {
    if (isSmall())
      CurArray = (const void**)safe_malloc(sizeof(void*) * RHS.CurArraySize);
    else {
      const void **T = (const void**)safe_realloc(CurArray,
                                                  sizeof(void*) * RHS.CurArraySize);
      CurArray = T;
    }
    IsSmall = false;
  }
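  // If neither branch was taken, both sets are heap-backed with equal
  // capacity, so the existing buffer is reused as-is.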

  copyHelper(RHS);
}

void SmallPtrSetImplBase::copyHelper(const SmallPtrSetImplBase &RHS) {
  // Copy over the new array size.
  CurArraySize = RHS.CurArraySize;

  // Copy over the contents from the other set.
  llvm::copy(RHS.buckets(), CurArray);

  NumEntries = RHS.NumEntries;
  NumTombstones = RHS.NumTombstones;
}

void SmallPtrSetImplBase::moveFrom(const void **SmallStorage,
                                   unsigned SmallSize,
                                   const void **RHSSmallStorage,
                                   SmallPtrSetImplBase &&RHS) {
  if (!isSmall())
    free(CurArray);
  moveHelper(SmallStorage, SmallSize, RHSSmallStorage, std::move(RHS));
}

void SmallPtrSetImplBase::moveHelper(const void **SmallStorage,
                                     unsigned SmallSize,
                                     const void **RHSSmallStorage,
                                     SmallPtrSetImplBase &&RHS) {
  assert(&RHS != this && "Self-move should be handled by the caller.");

  if (RHS.isSmall()) {
    // Copy a small RHS rather than moving.
    CurArray = SmallStorage;
    llvm::copy(RHS.small_buckets(), CurArray);
  } else {
    CurArray = RHS.CurArray;
    RHS.CurArray = RHSSmallStorage;
  }
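  // Note: moving a heap-backed set is O(1), since only the buffer pointer
  // changes hands above; moving a small set degenerates to copying at most
  // SmallSize inline slots.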

  // Copy the rest of the trivial members.
  CurArraySize = RHS.CurArraySize;
  NumEntries = RHS.NumEntries;
  NumTombstones = RHS.NumTombstones;
  IsSmall = RHS.IsSmall;

  // Make the RHS small and empty.
  RHS.CurArraySize = SmallSize;
  RHS.NumEntries = 0;
  RHS.NumTombstones = 0;
  RHS.IsSmall = true;
}

void SmallPtrSetImplBase::swap(const void **SmallStorage,
                               const void **RHSSmallStorage,
                               SmallPtrSetImplBase &RHS) {
  if (this == &RHS) return;

  // We can only avoid copying elements if neither set is small.
  if (!this->isSmall() && !RHS.isSmall()) {
    std::swap(this->CurArray, RHS.CurArray);
    std::swap(this->CurArraySize, RHS.CurArraySize);
    std::swap(this->NumEntries, RHS.NumEntries);
    std::swap(this->NumTombstones, RHS.NumTombstones);
    return;
  }

  // FIXME: From here on we assume that both sets have the same small size.

  // If both are small, just swap the small elements.
  if (this->isSmall() && RHS.isSmall()) {
    unsigned MinEntries = std::min(this->NumEntries, RHS.NumEntries);
    std::swap_ranges(this->CurArray, this->CurArray + MinEntries, RHS.CurArray);
    if (this->NumEntries > MinEntries) {
      std::copy(this->CurArray + MinEntries, this->CurArray + this->NumEntries,
                RHS.CurArray + MinEntries);
    } else {
      std::copy(RHS.CurArray + MinEntries, RHS.CurArray + RHS.NumEntries,
                this->CurArray + MinEntries);
    }
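    // Illustrative: if this set holds 5 entries and RHS holds 2, the first
    // two slots are swapped in place and this set's remaining three entries
    // are copied into RHS's inline storage; the counts are swapped below.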
    assert(this->CurArraySize == RHS.CurArraySize);
    std::swap(this->NumEntries, RHS.NumEntries);
    std::swap(this->NumTombstones, RHS.NumTombstones);
    return;
  }

  // If only one side is small, copy the small elements into the large side and
  // move the pointer from the large side to the small side.
  SmallPtrSetImplBase &SmallSide = this->isSmall() ? *this : RHS;
  SmallPtrSetImplBase &LargeSide = this->isSmall() ? RHS : *this;
  const void **LargeSideInlineStorage =
      this->isSmall() ? RHSSmallStorage : SmallStorage;
  llvm::copy(SmallSide.small_buckets(), LargeSideInlineStorage);
  std::swap(LargeSide.CurArraySize, SmallSide.CurArraySize);
  std::swap(LargeSide.NumEntries, SmallSide.NumEntries);
  std::swap(LargeSide.NumTombstones, SmallSide.NumTombstones);
  SmallSide.CurArray = LargeSide.CurArray;
  SmallSide.IsSmall = false;
  LargeSide.CurArray = LargeSideInlineStorage;
  LargeSide.IsSmall = true;
}