//===- llvm/ADT/SmallPtrSet.cpp - 'Normally small' pointer set -----------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the SmallPtrSet class.  See SmallPtrSet.h for an
// overview of the algorithm.
//
//===----------------------------------------------------------------------===//

#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/DenseMapInfo.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/MemAlloc.h"
#include <algorithm>
#include <cassert>
#include <cstdlib>
#include <cstring>

using namespace llvm;
void SmallPtrSetImplBase::shrink_and_clear() {
  assert(!isSmall() && "Can't shrink a small set!");
  free(CurArray);

  // Reduce the number of buckets.
  unsigned Size = size();
  CurArraySize = Size > 16 ? 1 << (Log2_32_Ceil(Size) + 1) : 32;
  NumNonEmpty = NumTombstones = 0;

  // Install the new array.  Clear all the buckets to empty.
  CurArray = (const void**)safe_malloc(sizeof(void*) * CurArraySize);

  memset(CurArray, -1, CurArraySize*sizeof(void*));
}
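
// Illustrative sizing, as a sketch: the new capacity is derived from the
// element count before clearing, so refilling to a similar size stays under
// a 1/2 load factor. For size() == 20, Log2_32_Ceil(20) == 5 and the table
// gets 1 << 6 == 64 buckets; any size() <= 16 falls back to the 32-bucket
// minimum.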

std::pair<const void *const *, bool>
SmallPtrSetImplBase::insert_imp_big(const void *Ptr) {
  if (LLVM_UNLIKELY(size() * 4 >= CurArraySize * 3)) {
    // If more than 3/4 of the array is full, grow.
    Grow(CurArraySize < 64 ? 128 : CurArraySize * 2);
  } else if (LLVM_UNLIKELY(CurArraySize - NumNonEmpty < CurArraySize / 8)) {
    // If fewer than 1/8 of the buckets are empty (meaning that many are
    // filled with tombstones), rehash at the current size to clear them out.
    Grow(CurArraySize);
  }

  // Okay, we know we have space.  Find a hash bucket.
  const void **Bucket = const_cast<const void**>(FindBucketFor(Ptr));
  if (*Bucket == Ptr)
    return std::make_pair(Bucket, false); // Already inserted, good.

  // Otherwise, insert it!
  if (*Bucket == getTombstoneMarker())
    --NumTombstones;
  else
    ++NumNonEmpty; // Track density.
  *Bucket = Ptr;
  incrementEpoch();
  return std::make_pair(Bucket, true);
}
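
// Worked example of the growth policy (hypothetical numbers): with
// CurArraySize == 128, the table doubles to 256 once size() reaches 96,
// since 96 * 4 >= 128 * 3. If the table is instead clogged with tombstones
// (fewer than 128 / 8 == 16 empty buckets), Grow(128) rehashes into a fresh
// table of the same capacity, dropping the tombstones without growing.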

const void *const *SmallPtrSetImplBase::doFind(const void *Ptr) const {
  unsigned BucketNo =
      DenseMapInfo<void *>::getHashValue(Ptr) & (CurArraySize - 1);
  unsigned ProbeAmt = 1;
  while (true) {
    const void *const *Bucket = CurArray + BucketNo;
    if (LLVM_LIKELY(*Bucket == Ptr))
      return Bucket;
    if (LLVM_LIKELY(*Bucket == getEmptyMarker()))
      return nullptr;

    // Otherwise, it's a hash collision or a tombstone, continue quadratic
    // probing.
    BucketNo += ProbeAmt++;
    BucketNo &= CurArraySize - 1;
  }
}
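
// The probe sequence above visits Hash, Hash+1, Hash+3, Hash+6, and so on;
// the offsets are the triangular numbers 0, 1, 3, 6, ... Because
// CurArraySize is always a power of two, triangular-number probing visits
// every bucket before repeating, so the loop terminates at Ptr or at an
// empty bucket as long as the table is never completely full, which
// insert_imp_big guarantees by growing at 3/4 load.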

const void *const *SmallPtrSetImplBase::FindBucketFor(const void *Ptr) const {
  unsigned Bucket = DenseMapInfo<void *>::getHashValue(Ptr) & (CurArraySize-1);
  unsigned ArraySize = CurArraySize;
  unsigned ProbeAmt = 1;
  const void *const *Array = CurArray;
  const void *const *Tombstone = nullptr;
  while (true) {
    // If we found an empty bucket, the pointer doesn't exist in the set.
    // Return a tombstone if we've seen one so far, or the empty bucket if
    // not.
    if (LLVM_LIKELY(Array[Bucket] == getEmptyMarker()))
      return Tombstone ? Tombstone : Array+Bucket;

    // Found Ptr's bucket?
    if (LLVM_LIKELY(Array[Bucket] == Ptr))
      return Array+Bucket;

    // If this is a tombstone, remember it.  If Ptr ends up not in the set, we
    // prefer to return it over a later bucket that would require more probing
    // to reach.
    if (Array[Bucket] == getTombstoneMarker() && !Tombstone)
      Tombstone = Array+Bucket;  // Remember the first tombstone found.

    // It's a hash collision or a tombstone.  Reprobe.
    Bucket = (Bucket + ProbeAmt++) & (ArraySize-1);
  }
}
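
// Example of tombstone reuse (illustrative): if a colliding element on Ptr's
// probe chain was erased earlier, inserting Ptr fills that tombstone slot
// instead of the empty bucket further down the chain, so later lookups of
// Ptr need fewer probes.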

/// Grow - Allocate a larger backing store for the buckets and move it over.
///
void SmallPtrSetImplBase::Grow(unsigned NewSize) {
  const void **OldBuckets = CurArray;
  const void **OldEnd = EndPointer();
  bool WasSmall = isSmall();

  // Install the new array.  Clear all the buckets to empty.
  const void **NewBuckets = (const void**)safe_malloc(sizeof(void*) * NewSize);

  // Update the members only after the allocation has succeeded.
  CurArray = NewBuckets;
  CurArraySize = NewSize;
  memset(CurArray, -1, NewSize*sizeof(void*));

  // Copy over all valid entries.
  for (const void **BucketPtr = OldBuckets; BucketPtr != OldEnd; ++BucketPtr) {
    // Copy over the element if it is valid.
    const void *Elt = *BucketPtr;
    if (Elt != getTombstoneMarker() && Elt != getEmptyMarker())
      *const_cast<void**>(FindBucketFor(Elt)) = const_cast<void*>(Elt);
  }

  if (!WasSmall)
    free(OldBuckets);
  NumNonEmpty -= NumTombstones;
  NumTombstones = 0;
  IsSmall = false;
}
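
// Only live entries are re-inserted above; tombstones are simply not copied.
// That is why NumNonEmpty shrinks by exactly NumTombstones afterwards: every
// bucket that was non-empty only because of a tombstone is empty in the new
// table.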

SmallPtrSetImplBase::SmallPtrSetImplBase(const void **SmallStorage,
                                         const SmallPtrSetImplBase &that) {
  IsSmall = that.isSmall();
  if (IsSmall) {
    // If we're becoming small, prepare to insert into our stack space.
    CurArray = SmallStorage;
  } else {
    // Otherwise, allocate new heap space to match 'that'.
    CurArray = (const void**)safe_malloc(sizeof(void*) * that.CurArraySize);
  }

  // Copy over the contents of 'that'.
  copyHelper(that);
}
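
// A minimal usage sketch of the copy path via the public wrapper (the
// variables here are hypothetical):
//   SmallPtrSet<int *, 4> A;
//   int X;
//   A.insert(&X);
//   SmallPtrSet<int *, 4> B(A); // B gets its own inline or heap buckets.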

SmallPtrSetImplBase::SmallPtrSetImplBase(const void **SmallStorage,
                                         unsigned SmallSize,
                                         const void **RHSSmallStorage,
                                         SmallPtrSetImplBase &&that) {
  moveHelper(SmallStorage, SmallSize, RHSSmallStorage, std::move(that));
}

void SmallPtrSetImplBase::copyFrom(const void **SmallStorage,
                                   const SmallPtrSetImplBase &RHS) {
  assert(&RHS != this && "Self-copy should be handled by the caller.");

  if (isSmall() && RHS.isSmall())
    assert(CurArraySize == RHS.CurArraySize &&
           "Cannot assign sets with different small sizes");

  // If we're becoming small, prepare to insert into our stack space.
  if (RHS.isSmall()) {
    if (!isSmall())
      free(CurArray);
    CurArray = SmallStorage;
    IsSmall = true;
    // Otherwise, allocate new heap space (unless we were the same size).
  } else if (CurArraySize != RHS.CurArraySize) {
    if (isSmall())
      CurArray = (const void**)safe_malloc(sizeof(void*) * RHS.CurArraySize);
    else {
      const void **T = (const void**)safe_realloc(CurArray,
                                                  sizeof(void*) * RHS.CurArraySize);
      CurArray = T;
    }
    IsSmall = false;
  }

  copyHelper(RHS);
}

void SmallPtrSetImplBase::copyHelper(const SmallPtrSetImplBase &RHS) {
  // Copy over the new array size.
  CurArraySize = RHS.CurArraySize;

  // Copy over the contents from the other set.
  std::copy(RHS.CurArray, RHS.EndPointer(), CurArray);

  NumNonEmpty = RHS.NumNonEmpty;
  NumTombstones = RHS.NumTombstones;
}
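
// copyHelper duplicates raw buckets (including empty and tombstone markers)
// rather than re-inserting elements. This is safe because the callers ensure
// the destination has room for RHS.CurArraySize buckets and it adopts the
// same CurArraySize, so each element's hash maps to the same bucket index in
// both tables.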

void SmallPtrSetImplBase::moveFrom(const void **SmallStorage,
                                   unsigned SmallSize,
                                   const void **RHSSmallStorage,
                                   SmallPtrSetImplBase &&RHS) {
  if (!isSmall())
    free(CurArray);
  moveHelper(SmallStorage, SmallSize, RHSSmallStorage, std::move(RHS));
}

void SmallPtrSetImplBase::moveHelper(const void **SmallStorage,
                                     unsigned SmallSize,
                                     const void **RHSSmallStorage,
                                     SmallPtrSetImplBase &&RHS) {
  assert(&RHS != this && "Self-move should be handled by the caller.");

  if (RHS.isSmall()) {
    // Copy a small RHS rather than moving.
    CurArray = SmallStorage;
    std::copy(RHS.CurArray, RHS.CurArray + RHS.NumNonEmpty, CurArray);
  } else {
    CurArray = RHS.CurArray;
    RHS.CurArray = RHSSmallStorage;
  }

  // Copy the rest of the trivial members.
  CurArraySize = RHS.CurArraySize;
  NumNonEmpty = RHS.NumNonEmpty;
  NumTombstones = RHS.NumTombstones;
  IsSmall = RHS.IsSmall;

  // Make the RHS small and empty.
  RHS.CurArraySize = SmallSize;
  RHS.NumNonEmpty = 0;
  RHS.NumTombstones = 0;
  RHS.IsSmall = true;
}
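
// After moveHelper returns, RHS is a valid empty set over its own inline
// storage, so it can be reused or destroyed safely. A minimal sketch via the
// public wrapper (hypothetical variables):
//   SmallPtrSet<int *, 8> A, B;
//   int X;
//   A.insert(&X);
//   B = std::move(A); // B now contains &X; A is empty but still usable.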

void SmallPtrSetImplBase::swap(const void **SmallStorage,
                               const void **RHSSmallStorage,
                               SmallPtrSetImplBase &RHS) {
  if (this == &RHS) return;

  // We can only avoid copying elements if neither set is small.
  if (!this->isSmall() && !RHS.isSmall()) {
    std::swap(this->CurArray, RHS.CurArray);
    std::swap(this->CurArraySize, RHS.CurArraySize);
    std::swap(this->NumNonEmpty, RHS.NumNonEmpty);
    std::swap(this->NumTombstones, RHS.NumTombstones);
    return;
  }

  // FIXME: From here on we assume that both sets have the same small size.

  // If only RHS is small, copy the small elements into LHS and move the pointer
  // from LHS to RHS.
  if (!this->isSmall() && RHS.isSmall()) {
    std::copy(RHS.CurArray, RHS.CurArray + RHS.NumNonEmpty, SmallStorage);
    std::swap(RHS.CurArraySize, this->CurArraySize);
    std::swap(this->NumNonEmpty, RHS.NumNonEmpty);
    std::swap(this->NumTombstones, RHS.NumTombstones);
    RHS.CurArray = this->CurArray;
    RHS.IsSmall = false;
    this->CurArray = SmallStorage;
    this->IsSmall = true;
    return;
  }

  // If only LHS is small, copy the small elements into RHS and move the pointer
  // from RHS to LHS.
  if (this->isSmall() && !RHS.isSmall()) {
    std::copy(this->CurArray, this->CurArray + this->NumNonEmpty,
              RHSSmallStorage);
    std::swap(RHS.CurArraySize, this->CurArraySize);
    std::swap(RHS.NumNonEmpty, this->NumNonEmpty);
    std::swap(RHS.NumTombstones, this->NumTombstones);
    this->CurArray = RHS.CurArray;
    this->IsSmall = false;
    RHS.CurArray = RHSSmallStorage;
    RHS.IsSmall = true;
    return;
  }

  // Both are small, so just swap the small elements.
  assert(this->isSmall() && RHS.isSmall());
  unsigned MinNonEmpty = std::min(this->NumNonEmpty, RHS.NumNonEmpty);
  std::swap_ranges(this->CurArray, this->CurArray + MinNonEmpty, RHS.CurArray);
  if (this->NumNonEmpty > MinNonEmpty) {
    std::copy(this->CurArray + MinNonEmpty, this->CurArray + this->NumNonEmpty,
              RHS.CurArray + MinNonEmpty);
  } else {
    std::copy(RHS.CurArray + MinNonEmpty, RHS.CurArray + RHS.NumNonEmpty,
              this->CurArray + MinNonEmpty);
  }
  assert(this->CurArraySize == RHS.CurArraySize);
  std::swap(this->NumNonEmpty, RHS.NumNonEmpty);
  std::swap(this->NumTombstones, RHS.NumTombstones);
}
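
// The small/small case above can swap and copy element ranges directly
// because small-mode sets store their elements densely in the first
// NumNonEmpty slots with no hashing; only big-mode tables place elements by
// hash bucket.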