1//== RegionStore.cpp - Field-sensitive store model --------------*- C++ -*--==//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8//
// This file defines a basic region store model. The model is field sensitive,
// but it assumes nothing about the shape of the heap, so recursive data
// structures are largely ignored (effectively a 1-limited analysis).
// Parameter pointers are assumed not to alias one another, and the objects
// they point to are created lazily.
14//
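// For illustration (a hypothetical snippet, not code from this file): given
//
//   struct S { int x, y; } s;
//   s.x = 1;
//
// the store records a binding for the FieldRegion "s.x" rather than
// collapsing all of "s" into a single value; this is the field sensitivity
// mentioned above.
//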
15//===----------------------------------------------------------------------===//
16
17#include "clang/AST/Attr.h"
18#include "clang/AST/CharUnits.h"
19#include "clang/ASTMatchers/ASTMatchFinder.h"
20#include "clang/Analysis/AnalysisDeclContext.h"
21#include "clang/Basic/JsonSupport.h"
22#include "clang/Basic/TargetInfo.h"
23#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
24#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
25#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
26#include "clang/StaticAnalyzer/Core/PathSensitive/MemRegion.h"
27#include "clang/StaticAnalyzer/Core/PathSensitive/ProgramState.h"
28#include "llvm/ADT/ImmutableMap.h"
29#include "llvm/ADT/STLExtras.h"
30#include "llvm/Support/TimeProfiler.h"
31#include "llvm/Support/raw_ostream.h"
32#include <limits>
33#include <optional>
34#include <utility>
35
36using namespace clang;
37using namespace ento;
38
39//===----------------------------------------------------------------------===//
40// Representation of binding keys.
41//===----------------------------------------------------------------------===//
42
43namespace {
44class BindingKey {
45public:
46 enum Kind { Default = 0x0, Direct = 0x1 };
47private:
48 enum { Symbolic = 0x2 };
49
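  // P packs the bound region pointer together with two low bits: Direct vs.
  // Default in bit 0 and the Symbolic flag in bit 1. Data holds either the
  // concrete offset or, when the offset is symbolic, the base region pointer
  // (see getConcreteOffsetRegion()).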
50 llvm::PointerIntPair<const MemRegion *, 2> P;
51 uint64_t Data;
52
53 /// Create a key for a binding to region \p r, which has a symbolic offset
54 /// from region \p Base.
55 explicit BindingKey(const SubRegion *r, const SubRegion *Base, Kind k)
56 : P(r, k | Symbolic), Data(reinterpret_cast<uintptr_t>(Base)) {
57 assert(r && Base && "Must have known regions.");
58 assert(getConcreteOffsetRegion() == Base && "Failed to store base region");
59 }
60
61 /// Create a key for a binding at \p offset from base region \p r.
62 explicit BindingKey(const MemRegion *r, uint64_t offset, Kind k)
63 : P(r, k), Data(offset) {
64 assert(r && "Must have known regions.");
65 assert(getOffset() == offset && "Failed to store offset");
66 assert((r == r->getBaseRegion() ||
67 isa<ObjCIvarRegion, CXXDerivedObjectRegion>(r)) &&
68 "Not a base");
69 }
70
71public:
72 bool isDirect() const { return P.getInt() & Direct; }
73 bool isDefault() const { return !isDirect(); }
74 bool hasSymbolicOffset() const { return P.getInt() & Symbolic; }
75
76 const MemRegion *getRegion() const { return P.getPointer(); }
77 uint64_t getOffset() const {
78 assert(!hasSymbolicOffset());
79 return Data;
80 }
81
82 const SubRegion *getConcreteOffsetRegion() const {
83 assert(hasSymbolicOffset());
84 return reinterpret_cast<const SubRegion *>(static_cast<uintptr_t>(Data));
85 }
86
87 const MemRegion *getBaseRegion() const {
88 if (hasSymbolicOffset())
89 return getConcreteOffsetRegion()->getBaseRegion();
90 return getRegion()->getBaseRegion();
91 }
92
  void Profile(llvm::FoldingSetNodeID& ID) const {
    ID.AddPointer(P.getOpaqueValue());
    ID.AddInteger(Data);
  }
97
98 static BindingKey Make(const MemRegion *R, Kind k);
99
100 bool operator<(const BindingKey &X) const {
101 if (P.getOpaqueValue() < X.P.getOpaqueValue())
102 return true;
103 if (P.getOpaqueValue() > X.P.getOpaqueValue())
104 return false;
105 return Data < X.Data;
106 }
107
108 bool operator==(const BindingKey &X) const {
109 return P.getOpaqueValue() == X.P.getOpaqueValue() &&
110 Data == X.Data;
111 }
112
113 LLVM_DUMP_METHOD void dump() const;
114};
115
116std::string locDescr(Loc L) {
117 std::string S;
118 llvm::raw_string_ostream OS(S);
  L.dumpToStream(OS);
120 return OS.str();
121}
122} // end anonymous namespace
123
124BindingKey BindingKey::Make(const MemRegion *R, Kind k) {
125 const RegionOffset &RO = R->getAsOffset();
126 if (RO.hasSymbolicOffset())
    return BindingKey(cast<SubRegion>(R), cast<SubRegion>(RO.getRegion()), k);
128
129 return BindingKey(RO.getRegion(), RO.getOffset(), k);
130}
131
132namespace llvm {
133static inline raw_ostream &operator<<(raw_ostream &Out, BindingKey K) {
134 Out << "\"kind\": \"" << (K.isDirect() ? "Direct" : "Default")
135 << "\", \"offset\": ";
136
137 if (!K.hasSymbolicOffset())
138 Out << K.getOffset();
139 else
140 Out << "null";
141
142 return Out;
143}
144
145} // namespace llvm
146
147#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
148void BindingKey::dump() const { llvm::errs() << *this; }
149#endif
150
151//===----------------------------------------------------------------------===//
152// Actual Store type.
153//===----------------------------------------------------------------------===//
154
155typedef llvm::ImmutableMap<BindingKey, SVal> ClusterBindings;
156typedef llvm::ImmutableMapRef<BindingKey, SVal> ClusterBindingsRef;
157typedef std::pair<BindingKey, SVal> BindingPair;
158
159typedef llvm::ImmutableMap<const MemRegion *, ClusterBindings>
160 RegionBindings;
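
// Bindings are organized into clusters: the outer ImmutableMap is keyed by a
// cluster's base region, and each ClusterBindings map takes BindingKeys
// (offsets within that base region) to the values bound there.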
161
162namespace {
163class RegionBindingsRef : public llvm::ImmutableMapRef<const MemRegion *,
164 ClusterBindings> {
165 ClusterBindings::Factory *CBFactory;
166
  // This flag indicates whether the current bindings are within the analysis
  // that has started from main(). It affects how we perform loads from
  // global variables that have initializers: if we have observed the
  // program execution from the start and we know that these variables
  // have not been overwritten yet, we can be sure that their initializers
  // are still relevant. This flag is never changed when the bindings are
  // updated, so it could potentially be moved into RegionStoreManager
  // (same bindings, different loading procedure); however, that would make
  // the manager needlessly stateful.
176 bool IsMainAnalysis;
177
178public:
179 typedef llvm::ImmutableMapRef<const MemRegion *, ClusterBindings>
180 ParentTy;
181
182 RegionBindingsRef(ClusterBindings::Factory &CBFactory,
183 const RegionBindings::TreeTy *T,
184 RegionBindings::TreeTy::Factory *F, bool IsMainAnalysis)
185 : RegionBindingsRef(ParentTy(T, F), CBFactory, IsMainAnalysis) {}
186
187 RegionBindingsRef(const ParentTy &P, ClusterBindings::Factory &CBFactory,
188 bool IsMainAnalysis)
189 : ParentTy(P), CBFactory(&CBFactory), IsMainAnalysis(IsMainAnalysis) {}
190
191 RegionBindingsRef removeCluster(const MemRegion *BaseRegion) const {
    return RegionBindingsRef(ParentTy::remove(BaseRegion), *CBFactory,
                             IsMainAnalysis);
194 }
195
196 RegionBindingsRef addBinding(BindingKey K, SVal V) const;
197
198 RegionBindingsRef addBinding(const MemRegion *R,
199 BindingKey::Kind k, SVal V) const;
200
201 const SVal *lookup(BindingKey K) const;
202 const SVal *lookup(const MemRegion *R, BindingKey::Kind k) const;
203 using llvm::ImmutableMapRef<const MemRegion *, ClusterBindings>::lookup;
204
205 RegionBindingsRef removeBinding(BindingKey K);
206
207 RegionBindingsRef removeBinding(const MemRegion *R,
208 BindingKey::Kind k);
209
210 RegionBindingsRef removeBinding(const MemRegion *R) {
    return removeBinding(R, BindingKey::Direct).
           removeBinding(R, BindingKey::Default);
213 }
214
215 std::optional<SVal> getDirectBinding(const MemRegion *R) const;
216
  /// getDefaultBinding - Returns the default binding (if any) associated
  /// with a region and its subregions.
  std::optional<SVal> getDefaultBinding(const MemRegion *R) const;
220
221 /// Return the internal tree as a Store.
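  /// The IsMainAnalysis flag is encoded in the low bit of the returned
  /// pointer and decoded again in RegionStoreManager::getRegionBindings().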
222 Store asStore() const {
223 llvm::PointerIntPair<Store, 1, bool> Ptr = {
224 asImmutableMap().getRootWithoutRetain(), IsMainAnalysis};
225 return reinterpret_cast<Store>(Ptr.getOpaqueValue());
226 }
227
228 bool isMainAnalysis() const {
229 return IsMainAnalysis;
230 }
231
232 void printJson(raw_ostream &Out, const char *NL = "\n",
233 unsigned int Space = 0, bool IsDot = false) const {
234 using namespace llvm;
235 DenseMap<const MemRegion *, std::string> StringifyCache;
236 auto ToString = [&StringifyCache](const MemRegion *R) {
      auto [Place, Inserted] = StringifyCache.try_emplace(R);
238 if (!Inserted)
239 return Place->second;
240 std::string Res;
241 raw_string_ostream OS(Res);
242 OS << R;
243 Place->second = Res;
244 return Res;
245 };
246
247 using Cluster =
248 std::pair<const MemRegion *, ImmutableMap<BindingKey, SVal>>;
249 using Binding = std::pair<BindingKey, SVal>;
250
251 const auto MemSpaceBeforeRegionName = [&ToString](const Cluster *L,
252 const Cluster *R) {
      if (isa<MemSpaceRegion>(L->first) && !isa<MemSpaceRegion>(R->first))
        return true;
      if (!isa<MemSpaceRegion>(L->first) && isa<MemSpaceRegion>(R->first))
        return false;
      return ToString(L->first) < ToString(R->first);
258 };
259
260 const auto SymbolicBeforeOffset = [&ToString](const BindingKey &L,
261 const BindingKey &R) {
262 if (L.hasSymbolicOffset() && !R.hasSymbolicOffset())
263 return true;
264 if (!L.hasSymbolicOffset() && R.hasSymbolicOffset())
265 return false;
266 if (L.hasSymbolicOffset() && R.hasSymbolicOffset())
267 return ToString(L.getRegion()) < ToString(R.getRegion());
268 return L.getOffset() < R.getOffset();
269 };
270
271 const auto DefaultBindingBeforeDirectBindings =
272 [&SymbolicBeforeOffset](const Binding *LPtr, const Binding *RPtr) {
273 const BindingKey &L = LPtr->first;
274 const BindingKey &R = RPtr->first;
275 if (L.isDefault() && !R.isDefault())
276 return true;
277 if (!L.isDefault() && R.isDefault())
278 return false;
279 assert(L.isDefault() == R.isDefault());
280 return SymbolicBeforeOffset(L, R);
281 };
282
283 const auto AddrOf = [](const auto &Item) { return &Item; };
284
285 std::vector<const Cluster *> SortedClusters;
    SortedClusters.reserve(std::distance(begin(), end()));
    append_range(SortedClusters, map_range(*this, AddrOf));
    llvm::sort(SortedClusters, MemSpaceBeforeRegionName);
289
    for (auto [Idx, C] : llvm::enumerate(SortedClusters)) {
291 const auto &[BaseRegion, Bindings] = *C;
292 Indent(Out, Space, IsDot)
293 << "{ \"cluster\": \"" << BaseRegion << "\", \"pointer\": \""
294 << (const void *)BaseRegion << "\", \"items\": [" << NL;
295
296 std::vector<const Binding *> SortedBindings;
      SortedBindings.reserve(std::distance(Bindings.begin(), Bindings.end()));
      append_range(SortedBindings, map_range(Bindings, AddrOf));
      llvm::sort(SortedBindings, DefaultBindingBeforeDirectBindings);
300
301 ++Space;
      for (auto [Idx, B] : llvm::enumerate(SortedBindings)) {
303 const auto &[Key, Value] = *B;
304 Indent(Out, Space, IsDot) << "{ " << Key << ", \"value\": ";
305 Value.printJson(Out, /*AddQuotes=*/true);
306 Out << " }";
307 if (Idx != SortedBindings.size() - 1)
308 Out << ',';
309 Out << NL;
310 }
311 --Space;
312 Indent(Out, Space, IsDot) << "]}";
313 if (Idx != SortedClusters.size() - 1)
314 Out << ',';
315 Out << NL;
316 }
317 }
318
  LLVM_DUMP_METHOD void dump() const { printJson(llvm::errs()); }
320
321protected:
322 RegionBindingsRef
323 commitBindingsToCluster(const MemRegion *BaseRegion,
324 const ClusterBindings &Bindings) const;
325};
326} // end anonymous namespace
327
/// This class behaves like \c RegionBindingsRef, but imposes a limit on the
/// number of bindings that can be added.
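///
/// Once the limit is exhausted, the value being bound is recorded as
/// "escaped" and the bind is downgraded to a conservative Unknown default
/// binding for the target region (see addBinding below).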
330class LimitedRegionBindingsRef : public RegionBindingsRef {
331public:
332 LimitedRegionBindingsRef(RegionBindingsRef Base,
333 SmallVectorImpl<SVal> &EscapedValuesDuringBind,
334 std::optional<unsigned> BindingsLeft)
335 : RegionBindingsRef(Base),
336 EscapedValuesDuringBind(&EscapedValuesDuringBind),
337 BindingsLeft(BindingsLeft) {}
338
339 bool hasExhaustedBindingLimit() const {
340 return BindingsLeft.has_value() && BindingsLeft.value() == 0;
341 }
342
343 LimitedRegionBindingsRef withValuesEscaped(SVal V) const {
    EscapedValuesDuringBind->push_back(V);
345 return *this;
346 }
347
348 LimitedRegionBindingsRef
349 withValuesEscaped(nonloc::CompoundVal::iterator Begin,
350 nonloc::CompoundVal::iterator End) const {
    for (SVal V : llvm::make_range(Begin, End))
      withValuesEscaped(V);
353 return *this;
354 }
355
356 LimitedRegionBindingsRef
357 addWithoutDecreasingLimit(const MemRegion *BaseRegion,
358 data_type_ref BindingKeyAndValue) const {
    return LimitedRegionBindingsRef{RegionBindingsRef::commitBindingsToCluster(
                                        BaseRegion, BindingKeyAndValue),
                                    *EscapedValuesDuringBind, BindingsLeft};
362 }
363
364 LimitedRegionBindingsRef removeCluster(const MemRegion *BaseRegion) const {
365 return LimitedRegionBindingsRef{
366 RegionBindingsRef::removeCluster(BaseRegion), *EscapedValuesDuringBind,
367 BindingsLeft};
368 }
369
370 LimitedRegionBindingsRef addBinding(BindingKey K, SVal V) const {
371 std::optional<unsigned> NewBindingsLeft = BindingsLeft;
372 if (NewBindingsLeft.has_value()) {
373 assert(NewBindingsLeft.value() != 0);
374 NewBindingsLeft.value() -= 1;
375
      // If we just exhausted the binding limit, hijack this bind call to
      // install the catch-all default binding instead.
378 if (NewBindingsLeft.value() == 0) {
379 withValuesEscaped(V);
        K = BindingKey::Make(K.getRegion(), BindingKey::Default);
381 V = UnknownVal();
382 }
383 }
384
    return LimitedRegionBindingsRef{RegionBindingsRef::addBinding(K, V),
                                    *EscapedValuesDuringBind, NewBindingsLeft};
387 }
388
389 LimitedRegionBindingsRef addBinding(const MemRegion *R, BindingKey::Kind k,
390 SVal V) const {
    return addBinding(BindingKey::Make(R, k), V);
392 }
393
394private:
395 SmallVectorImpl<SVal> *EscapedValuesDuringBind; // nonnull
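  // Remaining number of bindings that may still be added before the fan-out
  // limit is reached; std::nullopt means the limit is disabled.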
396 std::optional<unsigned> BindingsLeft;
397};
398
399typedef const RegionBindingsRef& RegionBindingsConstRef;
400typedef const LimitedRegionBindingsRef &LimitedRegionBindingsConstRef;
401
402std::optional<SVal>
403RegionBindingsRef::getDirectBinding(const MemRegion *R) const {
  const SVal *V = lookup(R, BindingKey::Direct);
405 return V ? std::optional<SVal>(*V) : std::nullopt;
406}
407
408std::optional<SVal>
409RegionBindingsRef::getDefaultBinding(const MemRegion *R) const {
  const SVal *V = lookup(R, BindingKey::Default);
411 return V ? std::optional<SVal>(*V) : std::nullopt;
412}
413
414RegionBindingsRef RegionBindingsRef::commitBindingsToCluster(
415 const MemRegion *BaseRegion, const ClusterBindings &Bindings) const {
  return RegionBindingsRef(ParentTy::add(BaseRegion, Bindings), *CBFactory,
                           IsMainAnalysis);
418}
419
420RegionBindingsRef RegionBindingsRef::addBinding(BindingKey K, SVal V) const {
421 const MemRegion *Base = K.getBaseRegion();
422
  const ClusterBindings *ExistingCluster = lookup(Base);
  ClusterBindings Bindings =
      (ExistingCluster ? *ExistingCluster : CBFactory->getEmptyMap());
  Bindings = CBFactory->add(Bindings, K, V);
  return commitBindingsToCluster(Base, Bindings);
428}
429
430RegionBindingsRef RegionBindingsRef::addBinding(const MemRegion *R,
431 BindingKey::Kind k,
432 SVal V) const {
  return addBinding(BindingKey::Make(R, k), V);
434}
435
436const SVal *RegionBindingsRef::lookup(BindingKey K) const {
  const ClusterBindings *Cluster = lookup(K.getBaseRegion());
  if (!Cluster)
    return nullptr;
  return Cluster->lookup(K);
441}
442
443const SVal *RegionBindingsRef::lookup(const MemRegion *R,
444 BindingKey::Kind k) const {
  return lookup(BindingKey::Make(R, k));
446}
447
448RegionBindingsRef RegionBindingsRef::removeBinding(BindingKey K) {
449 const MemRegion *Base = K.getBaseRegion();
  const ClusterBindings *Cluster = lookup(Base);
  if (!Cluster)
    return *this;

  ClusterBindings NewCluster = CBFactory->remove(*Cluster, K);
  if (NewCluster.isEmpty())
    return removeCluster(Base);
  return commitBindingsToCluster(Base, NewCluster);
458}
459
460RegionBindingsRef RegionBindingsRef::removeBinding(const MemRegion *R,
461 BindingKey::Kind k){
  return removeBinding(BindingKey::Make(R, k));
463}
464
465//===----------------------------------------------------------------------===//
466// Main RegionStore logic.
467//===----------------------------------------------------------------------===//
468
469namespace {
470class InvalidateRegionsWorker;
471
472class RegionStoreManager : public StoreManager {
473public:
474 RegionBindings::Factory RBFactory;
475 mutable ClusterBindings::Factory CBFactory;
476
477 typedef std::vector<SVal> SValListTy;
478private:
479 typedef llvm::DenseMap<const LazyCompoundValData *,
480 SValListTy> LazyBindingsMapTy;
481 LazyBindingsMapTy LazyBindingsMap;
482
483 /// The largest number of fields a struct can have and still be
484 /// considered "small".
485 ///
486 /// This is currently used to decide whether or not it is worth "forcing" a
487 /// LazyCompoundVal on bind.
488 ///
  /// This is controlled by the 'region-store-small-struct-limit' option.
  /// To disable all small-struct-dependent behavior, set the option to "0".
491 const unsigned SmallStructLimit;
492
  /// The largest number of elements an array can have and still be
  /// considered "small".
  ///
  /// This is currently used to decide whether or not it is worth "forcing" a
  /// LazyCompoundVal on bind.
  ///
  /// This is controlled by the 'region-store-small-array-limit' option.
  /// To disable all small-array-dependent behavior, set the option to "0".
501 const unsigned SmallArrayLimit;
502
503 /// The number of bindings a single bind operation can scatter into.
504 /// For example, binding the initializer-list of an array would recurse and
505 /// bind all the individual array elements, potentially causing scalability
506 /// issues. Nullopt if the limit is disabled.
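  /// Note that this stores the configured fan-out limit plus one, so the
  /// final slot can be spent on the catch-all default binding installed when
  /// the limit is reached.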
507 const std::optional<unsigned> RegionStoreMaxBindingFanOutPlusOne;
508
509 /// A helper used to populate the work list with the given set of
510 /// regions.
511 void populateWorkList(InvalidateRegionsWorker &W,
512 ArrayRef<SVal> Values,
513 InvalidatedRegions *TopLevelRegions);
514
515 const AnalyzerOptions &getOptions() {
516 return StateMgr.getOwningEngine().getAnalysisManager().options;
517 }
518
519public:
520 RegionStoreManager(ProgramStateManager &mgr)
521 : StoreManager(mgr), RBFactory(mgr.getAllocator()),
522 CBFactory(mgr.getAllocator()),
523 SmallStructLimit(getOptions().RegionStoreSmallStructLimit),
524 SmallArrayLimit(getOptions().RegionStoreSmallArrayLimit),
525 RegionStoreMaxBindingFanOutPlusOne([&]() -> std::optional<unsigned> {
526 unsigned FanOut = getOptions().RegionStoreMaxBindingFanOut;
527 assert(FanOut != std::numeric_limits<unsigned>::max());
528 if (FanOut == 0)
529 return std::nullopt;
530 return FanOut + 1 /*for the default binding*/;
531 }()) {}
532
533 /// setImplicitDefaultValue - Set the default binding for the provided
534 /// MemRegion to the value implicitly defined for compound literals when
535 /// the value is not specified.
536 LimitedRegionBindingsRef
537 setImplicitDefaultValue(LimitedRegionBindingsConstRef B, const MemRegion *R,
538 QualType T);
539
540 /// ArrayToPointer - Emulates the "decay" of an array to a pointer
541 /// type. 'Array' represents the lvalue of the array being decayed
542 /// to a pointer, and the returned SVal represents the decayed
543 /// version of that lvalue (i.e., a pointer to the first element of
544 /// the array). This is called by ExprEngine when evaluating
545 /// casts from arrays to pointers.
546 SVal ArrayToPointer(Loc Array, QualType ElementTy) override;
547
548 /// Creates the Store that correctly represents memory contents before
549 /// the beginning of the analysis of the given top-level stack frame.
550 StoreRef getInitialStore(const LocationContext *InitLoc) override {
551 bool IsMainAnalysis = false;
    if (const auto *FD = dyn_cast<FunctionDecl>(InitLoc->getDecl()))
553 IsMainAnalysis = FD->isMain() && !Ctx.getLangOpts().CPlusPlus;
554 return StoreRef(RegionBindingsRef(RegionBindingsRef::ParentTy(
555 RBFactory.getEmptyMap(), RBFactory),
556 CBFactory, IsMainAnalysis)
557 .asStore(),
558 *this);
559 }
560
561 //===-------------------------------------------------------------------===//
562 // Binding values to regions.
563 //===-------------------------------------------------------------------===//
564 RegionBindingsRef
565 invalidateGlobalRegion(MemRegion::Kind K, ConstCFGElementRef Elem,
566 unsigned Count, const LocationContext *LCtx,
567 RegionBindingsRef B, InvalidatedRegions *Invalidated);
568
569 StoreRef invalidateRegions(Store store, ArrayRef<SVal> Values,
570 ConstCFGElementRef Elem, unsigned Count,
571 const LocationContext *LCtx, const CallEvent *Call,
572 InvalidatedSymbols &IS,
573 RegionAndSymbolInvalidationTraits &ITraits,
574 InvalidatedRegions *Invalidated,
575 InvalidatedRegions *InvalidatedTopLevel) override;
576
577 bool scanReachableSymbols(Store S, const MemRegion *R,
578 ScanReachableSymbols &Callbacks) override;
579
580 LimitedRegionBindingsRef
581 removeSubRegionBindings(LimitedRegionBindingsConstRef B, const SubRegion *R);
582 std::optional<SVal>
583 getConstantValFromConstArrayInitializer(RegionBindingsConstRef B,
584 const ElementRegion *R);
585 std::optional<SVal>
586 getSValFromInitListExpr(const InitListExpr *ILE,
587 const SmallVector<uint64_t, 2> &ConcreteOffsets,
588 QualType ElemT);
589 SVal getSValFromStringLiteral(const StringLiteral *SL, uint64_t Offset,
590 QualType ElemT);
591
592public: // Part of public interface to class.
593 BindResult Bind(Store store, Loc LV, SVal V) override {
594 llvm::SmallVector<SVal, 0> EscapedValuesDuringBind;
595 LimitedRegionBindingsRef BoundedBindings =
596 getRegionBindings(store, EscapedValuesDuringBind);
    return BindResult{StoreRef(bind(BoundedBindings, LV, V).asStore(), *this),
                      std::move(EscapedValuesDuringBind)};
599 }
600
601 LimitedRegionBindingsRef bind(LimitedRegionBindingsConstRef B, Loc LV,
602 SVal V);
603
604 // BindDefaultInitial is only used to initialize a region with
605 // a default value.
606 BindResult BindDefaultInitial(Store store, const MemRegion *R,
607 SVal V) override {
608 RegionBindingsRef B = getRegionBindings(store);
609 // Use other APIs when you have to wipe the region that was initialized
610 // earlier.
611 assert(!(B.getDefaultBinding(R) || B.getDirectBinding(R)) &&
612 "Double initialization!");
    B = B.addBinding(BindingKey::Make(R, BindingKey::Default), V);
    return BindResult{
        StoreRef(B.asImmutableMap().getRootWithoutRetain(), *this), {}};
616 }
617
618 // BindDefaultZero is used for zeroing constructors that may accidentally
619 // overwrite existing bindings.
620 BindResult BindDefaultZero(Store store, const MemRegion *R) override {
    // FIXME: The offsets of empty bases can be tricky because of
    // the so-called "empty base class optimization".
623 // If a base class has been optimized out
624 // we should not try to create a binding, otherwise we should.
625 // Unfortunately, at the moment ASTRecordLayout doesn't expose
626 // the actual sizes of the empty bases
627 // and trying to infer them from offsets/alignments
628 // seems to be error-prone and non-trivial because of the trailing padding.
629 // As a temporary mitigation we don't create bindings for empty bases.
    if (const auto *BR = dyn_cast<CXXBaseObjectRegion>(R))
      if (BR->getDecl()->isEmpty())
        return BindResult{StoreRef(store, *this), {}};
633
634 llvm::SmallVector<SVal, 0> EscapedValuesDuringBind;
635 LimitedRegionBindingsRef B =
636 getRegionBindings(store, EscapedValuesDuringBind);
    SVal V = svalBuilder.makeZeroVal(Ctx.CharTy);
    B = removeSubRegionBindings(B, cast<SubRegion>(R));
    B = B.addBinding(BindingKey::Make(R, BindingKey::Default), V);
    return BindResult{
        StoreRef(B.asImmutableMap().getRootWithoutRetain(), *this),
        std::move(EscapedValuesDuringBind)};
643 }
644
645 /// Attempt to extract the fields of \p LCV and bind them to the struct region
646 /// \p R.
647 ///
648 /// This path is used when it seems advantageous to "force" loading the values
649 /// within a LazyCompoundVal to bind memberwise to the struct region, rather
650 /// than using a Default binding at the base of the entire region. This is a
651 /// heuristic attempting to avoid building long chains of LazyCompoundVals.
652 ///
653 /// \returns The updated store bindings, or \c std::nullopt if binding
654 /// non-lazily would be too expensive.
655 std::optional<LimitedRegionBindingsRef>
656 tryBindSmallStruct(LimitedRegionBindingsConstRef B, const TypedValueRegion *R,
657 const RecordDecl *RD, nonloc::LazyCompoundVal LCV);
658
659 /// BindStruct - Bind a compound value to a structure.
660 LimitedRegionBindingsRef bindStruct(LimitedRegionBindingsConstRef B,
661 const TypedValueRegion *R, SVal V);
662
663 /// BindVector - Bind a compound value to a vector.
664 LimitedRegionBindingsRef bindVector(LimitedRegionBindingsConstRef B,
665 const TypedValueRegion *R, SVal V);
666
667 std::optional<LimitedRegionBindingsRef>
668 tryBindSmallArray(LimitedRegionBindingsConstRef B, const TypedValueRegion *R,
669 const ArrayType *AT, nonloc::LazyCompoundVal LCV);
670
671 LimitedRegionBindingsRef bindArray(LimitedRegionBindingsConstRef B,
672 const TypedValueRegion *R, SVal V);
673
674 /// Clears out all bindings in the given region and assigns a new value
675 /// as a Default binding.
676 LimitedRegionBindingsRef bindAggregate(LimitedRegionBindingsConstRef B,
677 const TypedRegion *R, SVal DefaultVal);
678
679 /// Create a new store with the specified binding removed.
680 /// \param ST the original store, that is the basis for the new store.
681 /// \param L the location whose binding should be removed.
682 StoreRef killBinding(Store ST, Loc L) override;
683
684 void incrementReferenceCount(Store store) override {
685 getRegionBindings(store).manualRetain();
686 }
687
688 /// If the StoreManager supports it, decrement the reference count of
689 /// the specified Store object. If the reference count hits 0, the memory
690 /// associated with the object is recycled.
691 void decrementReferenceCount(Store store) override {
692 getRegionBindings(store).manualRelease();
693 }
694
695 bool includedInBindings(Store store, const MemRegion *region) const override;
696
697 /// Return the value bound to specified location in a given state.
698 ///
699 /// The high level logic for this method is this:
700 /// getBinding (L)
701 /// if L has binding
702 /// return L's binding
703 /// else if L is in killset
704 /// return unknown
705 /// else
706 /// if L is on stack or heap
707 /// return undefined
708 /// else
709 /// return symbolic
710 SVal getBinding(Store S, Loc L, QualType T) override {
    return getBinding(getRegionBindings(S), L, T);
712 }
713
714 std::optional<SVal> getUniqueDefaultBinding(RegionBindingsConstRef B,
715 const TypedValueRegion *R) const;
716 std::optional<SVal>
717 getUniqueDefaultBinding(nonloc::LazyCompoundVal LCV) const;
718
719 std::optional<SVal> getDefaultBinding(Store S, const MemRegion *R) override {
    RegionBindingsRef B = getRegionBindings(S);
721 // Default bindings are always applied over a base region so look up the
722 // base region's default binding, otherwise the lookup will fail when R
723 // is at an offset from R->getBaseRegion().
    return B.getDefaultBinding(R->getBaseRegion());
725 }
726
727 SVal getBinding(RegionBindingsConstRef B, Loc L, QualType T = QualType());
728
729 SVal getBindingForElement(RegionBindingsConstRef B, const ElementRegion *R);
730
731 SVal getBindingForField(RegionBindingsConstRef B, const FieldRegion *R);
732
733 SVal getBindingForObjCIvar(RegionBindingsConstRef B, const ObjCIvarRegion *R);
734
735 SVal getBindingForVar(RegionBindingsConstRef B, const VarRegion *R);
736
737 SVal getBindingForLazySymbol(const TypedValueRegion *R);
738
739 SVal getBindingForFieldOrElementCommon(RegionBindingsConstRef B,
740 const TypedValueRegion *R,
741 QualType Ty);
742
743 SVal getLazyBinding(const SubRegion *LazyBindingRegion,
744 RegionBindingsRef LazyBinding);
745
746 /// Get bindings for the values in a struct and return a CompoundVal, used
747 /// when doing struct copy:
748 /// struct s x, y;
749 /// x = y;
750 /// y's value is retrieved by this method.
751 SVal getBindingForStruct(RegionBindingsConstRef B, const TypedValueRegion *R);
752 SVal getBindingForArray(RegionBindingsConstRef B, const TypedValueRegion *R);
753 NonLoc createLazyBinding(RegionBindingsConstRef B, const TypedValueRegion *R);
754
755 /// Used to lazily generate derived symbols for bindings that are defined
756 /// implicitly by default bindings in a super region.
757 ///
758 /// Note that callers may need to specially handle LazyCompoundVals, which
759 /// are returned as is in case the caller needs to treat them differently.
760 std::optional<SVal>
761 getBindingForDerivedDefaultValue(RegionBindingsConstRef B,
762 const MemRegion *superR,
763 const TypedValueRegion *R, QualType Ty);
764
  /// Get the Store and region whose binding this region \p R corresponds to.
766 ///
  /// If there is no lazy binding for \p R, the returned value will have a null
  /// \c second. Note that a null pointer can represent a valid Store.
769 std::pair<Store, const SubRegion *>
770 findLazyBinding(RegionBindingsConstRef B, const SubRegion *R,
771 const SubRegion *originalRegion);
772
773 /// Returns the cached set of interesting SVals contained within a lazy
774 /// binding.
775 ///
776 /// The precise value of "interesting" is determined for the purposes of
777 /// RegionStore's internal analysis. It must always contain all regions and
778 /// symbols, but may omit constants and other kinds of SVal.
779 ///
780 /// In contrast to compound values, LazyCompoundVals are also added
781 /// to the 'interesting values' list in addition to the child interesting
782 /// values.
783 const SValListTy &getInterestingValues(nonloc::LazyCompoundVal LCV);
784
785 //===------------------------------------------------------------------===//
786 // State pruning.
787 //===------------------------------------------------------------------===//
788
789 /// removeDeadBindings - Scans the RegionStore of 'state' for dead values.
790 /// It returns a new Store with these values removed.
791 StoreRef removeDeadBindings(Store store, const StackFrameContext *LCtx,
792 SymbolReaper& SymReaper) override;
793
794 //===------------------------------------------------------------------===//
795 // Utility methods.
796 //===------------------------------------------------------------------===//
797
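  /// Decode a Store back into bindings. The opaque Store pointer carries the
  /// ImmutableMap root together with the IsMainAnalysis flag in its low bit,
  /// mirroring RegionBindingsRef::asStore().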
798 RegionBindingsRef getRegionBindings(Store store) const {
799 llvm::PointerIntPair<Store, 1, bool> Ptr;
800 Ptr.setFromOpaqueValue(const_cast<void *>(store));
801 return {CBFactory,
802 static_cast<const RegionBindings::TreeTy *>(Ptr.getPointer()),
803 RBFactory.getTreeFactory(), Ptr.getInt()};
804 }
805
806 LimitedRegionBindingsRef
807 getRegionBindings(Store store,
808 SmallVectorImpl<SVal> &EscapedValuesDuringBind) const {
809 return LimitedRegionBindingsRef(
810 getRegionBindings(store), EscapedValuesDuringBind,
811 /*BindingsLeft=*/RegionStoreMaxBindingFanOutPlusOne);
812 }
813
814 void printJson(raw_ostream &Out, Store S, const char *NL = "\n",
815 unsigned int Space = 0, bool IsDot = false) const override;
816
817 void iterBindings(Store store, BindingsHandler& f) override {
818 RegionBindingsRef B = getRegionBindings(store);
819 for (const auto &[Region, Cluster] : B) {
820 for (const auto &[Key, Value] : Cluster) {
821 if (!Key.isDirect())
822 continue;
        if (const SubRegion *R = dyn_cast<SubRegion>(Key.getRegion())) {
          // FIXME: Possibly incorporate the offset?
          if (!f.HandleBinding(*this, store, R, Value))
826 return;
827 }
828 }
829 }
830 }
831};
832
833} // end anonymous namespace
834
835//===----------------------------------------------------------------------===//
836// RegionStore creation.
837//===----------------------------------------------------------------------===//
838
839std::unique_ptr<StoreManager>
840ento::CreateRegionStoreManager(ProgramStateManager &StMgr) {
  return std::make_unique<RegionStoreManager>(StMgr);
842}
843
844//===----------------------------------------------------------------------===//
845// Region Cluster analysis.
846//===----------------------------------------------------------------------===//
847
848namespace {
849/// Used to determine which global regions are automatically included in the
850/// initial worklist of a ClusterAnalysis.
851enum GlobalsFilterKind {
852 /// Don't include any global regions.
853 GFK_None,
854 /// Only include system globals.
855 GFK_SystemOnly,
856 /// Include all global regions.
857 GFK_All
858};
859
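/// CRTP base for worklist-driven traversals over the store. Subclasses seed
/// the worklist with interesting clusters and override the Visit* callbacks;
/// RunWorkList() then drains the list, guarding against revisiting clusters.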
860template <typename DERIVED>
861class ClusterAnalysis {
862protected:
863 typedef llvm::DenseMap<const MemRegion *, const ClusterBindings *> ClusterMap;
864 typedef const MemRegion * WorkListElement;
865 typedef SmallVector<WorkListElement, 10> WorkList;
866
867 llvm::SmallPtrSet<const ClusterBindings *, 16> Visited;
868
869 WorkList WL;
870
871 RegionStoreManager &RM;
872 ASTContext &Ctx;
873 SValBuilder &svalBuilder;
874
875 RegionBindingsRef B;
876
877
878protected:
879 const ClusterBindings *getCluster(const MemRegion *R) {
    return B.lookup(R);
881 }
882
883 /// Returns true if all clusters in the given memspace should be initially
884 /// included in the cluster analysis. Subclasses may provide their
885 /// own implementation.
886 bool includeEntireMemorySpace(const MemRegion *Base) {
887 return false;
888 }
889
890public:
891 ClusterAnalysis(RegionStoreManager &rm, ProgramStateManager &StateMgr,
892 RegionBindingsRef b)
893 : RM(rm), Ctx(StateMgr.getContext()),
894 svalBuilder(StateMgr.getSValBuilder()), B(std::move(b)) {}
895
896 RegionBindingsRef getRegionBindings() const { return B; }
897
898 bool isVisited(const MemRegion *R) {
    return Visited.count(getCluster(R));
900 }
901
902 void GenerateClusters() {
903 // Scan the entire set of bindings and record the region clusters.
904 for (RegionBindingsRef::iterator RI = B.begin(), RE = B.end();
905 RI != RE; ++RI){
906 const MemRegion *Base = RI.getKey();
907
908 const ClusterBindings &Cluster = RI.getData();
909 assert(!Cluster.isEmpty() && "Empty clusters should be removed");
910 static_cast<DERIVED*>(this)->VisitAddedToCluster(Base, Cluster);
911
      // If the base's memspace should be entirely invalidated, add the cluster
      // to the worklist up front.
914 if (static_cast<DERIVED*>(this)->includeEntireMemorySpace(Base))
915 AddToWorkList(WorkListElement(Base), &Cluster);
916 }
917 }
918
919 bool AddToWorkList(WorkListElement E, const ClusterBindings *C) {
    if (C && !Visited.insert(C).second)
      return false;
    WL.push_back(E);
923 return true;
924 }
925
926 bool AddToWorkList(const MemRegion *R) {
927 return static_cast<DERIVED*>(this)->AddToWorkList(R);
928 }
929
930 void RunWorkList() {
931 while (!WL.empty()) {
932 WorkListElement E = WL.pop_back_val();
933 const MemRegion *BaseR = E;
934
      static_cast<DERIVED*>(this)->VisitCluster(BaseR, getCluster(BaseR));
936 }
937 }
938
939 void VisitAddedToCluster(const MemRegion *baseR, const ClusterBindings &C) {}
940 void VisitCluster(const MemRegion *baseR, const ClusterBindings *C) {}
941
942 void VisitCluster(const MemRegion *BaseR, const ClusterBindings *C,
943 bool Flag) {
944 static_cast<DERIVED*>(this)->VisitCluster(BaseR, C);
945 }
946};
947}
948
949//===----------------------------------------------------------------------===//
950// Binding invalidation.
951//===----------------------------------------------------------------------===//
952
953bool RegionStoreManager::scanReachableSymbols(Store S, const MemRegion *R,
954 ScanReachableSymbols &Callbacks) {
955 assert(R == R->getBaseRegion() && "Should only be called for base regions");
  RegionBindingsRef B = getRegionBindings(S);
  const ClusterBindings *Cluster = B.lookup(R);
958
959 if (!Cluster)
960 return true;
961
962 for (ClusterBindings::iterator RI = Cluster->begin(), RE = Cluster->end();
963 RI != RE; ++RI) {
    if (!Callbacks.scan(RI.getData()))
965 return false;
966 }
967
968 return true;
969}
970
971static inline bool isUnionField(const FieldRegion *FR) {
972 return FR->getDecl()->getParent()->isUnion();
973}
974
975typedef SmallVector<const FieldDecl *, 8> FieldVector;
976
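/// Collect the chain of (non-union) fields between \p K's bound region and
/// its concrete base region, walking from the innermost region outward.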
977static void getSymbolicOffsetFields(BindingKey K, FieldVector &Fields) {
978 assert(K.hasSymbolicOffset() && "Not implemented for concrete offset keys");
979
980 const MemRegion *Base = K.getConcreteOffsetRegion();
981 const MemRegion *R = K.getRegion();
982
983 while (R != Base) {
    if (const FieldRegion *FR = dyn_cast<FieldRegion>(R))
      if (!isUnionField(FR))
        Fields.push_back(FR->getDecl());

    R = cast<SubRegion>(R)->getSuperRegion();
989 }
990}
991
992static bool isCompatibleWithFields(BindingKey K, const FieldVector &Fields) {
993 assert(K.hasSymbolicOffset() && "Not implemented for concrete offset keys");
994
995 if (Fields.empty())
996 return true;
997
998 FieldVector FieldsInBindingKey;
  getSymbolicOffsetFields(K, FieldsInBindingKey);

  ptrdiff_t Delta = FieldsInBindingKey.size() - Fields.size();
  if (Delta >= 0)
    return std::equal(FieldsInBindingKey.begin() + Delta,
                      FieldsInBindingKey.end(),
                      Fields.begin());
  else
    return std::equal(FieldsInBindingKey.begin(), FieldsInBindingKey.end(),
                      Fields.begin() - Delta);
1009}
1010
1011/// Collects all bindings in \p Cluster that may refer to bindings within
1012/// \p Top.
1013///
1014/// Each binding is a pair whose \c first is the key (a BindingKey) and whose
1015/// \c second is the value (an SVal).
1016///
1017/// The \p IncludeAllDefaultBindings parameter specifies whether to include
1018/// default bindings that may extend beyond \p Top itself, e.g. if \p Top is
1019/// an aggregate within a larger aggregate with a default binding.
1020static void
1021collectSubRegionBindings(SmallVectorImpl<BindingPair> &Bindings,
1022 SValBuilder &SVB, const ClusterBindings &Cluster,
1023 const SubRegion *Top, BindingKey TopKey,
1024 bool IncludeAllDefaultBindings) {
1025 FieldVector FieldsInSymbolicSubregions;
1026 if (TopKey.hasSymbolicOffset()) {
    getSymbolicOffsetFields(TopKey, FieldsInSymbolicSubregions);
    Top = TopKey.getConcreteOffsetRegion();
    TopKey = BindingKey::Make(Top, BindingKey::Default);
1030 }
1031
1032 // Find the length (in bits) of the region being invalidated.
1033 uint64_t Length = UINT64_MAX;
1034 SVal Extent = Top->getMemRegionManager().getStaticSize(MR: Top, SVB);
1035 if (std::optional<nonloc::ConcreteInt> ExtentCI =
1036 Extent.getAs<nonloc::ConcreteInt>()) {
1037 const llvm::APSInt &ExtentInt = ExtentCI->getValue();
1038 assert(ExtentInt.isNonNegative() || ExtentInt.isUnsigned());
1039 // Extents are in bytes but region offsets are in bits. Be careful!
1040 Length = ExtentInt.getLimitedValue() * SVB.getContext().getCharWidth();
1041 } else if (const FieldRegion *FR = dyn_cast<FieldRegion>(Val: Top)) {
1042 if (FR->getDecl()->isBitField())
1043 Length = FR->getDecl()->getBitWidthValue();
1044 }
1045
1046 for (const auto &StoreEntry : Cluster) {
1047 BindingKey NextKey = StoreEntry.first;
1048 if (NextKey.getRegion() == TopKey.getRegion()) {
1049 // FIXME: This doesn't catch the case where we're really invalidating a
1050 // region with a symbolic offset. Example:
1051 // R: points[i].y
1052 // Next: points[0].x
1053
1054 if (NextKey.getOffset() > TopKey.getOffset() &&
1055 NextKey.getOffset() - TopKey.getOffset() < Length) {
1056 // Case 1: The next binding is inside the region we're invalidating.
1057 // Include it.
1058 Bindings.push_back(Elt: StoreEntry);
1059
1060 } else if (NextKey.getOffset() == TopKey.getOffset()) {
1061 // Case 2: The next binding is at the same offset as the region we're
1062 // invalidating. In this case, we need to leave default bindings alone,
        // since they may be providing a default value for regions beyond what
1064 // we're invalidating.
1065 // FIXME: This is probably incorrect; consider invalidating an outer
1066 // struct whose first field is bound to a LazyCompoundVal.
1067 if (IncludeAllDefaultBindings || NextKey.isDirect())
1068 Bindings.push_back(Elt: StoreEntry);
1069 }
1070
1071 } else if (NextKey.hasSymbolicOffset()) {
1072 const MemRegion *Base = NextKey.getConcreteOffsetRegion();
1073 if (Top->isSubRegionOf(R: Base) && Top != Base) {
1074 // Case 3: The next key is symbolic and we just changed something within
1075 // its concrete region. We don't know if the binding is still valid, so
1076 // we'll be conservative and include it.
1077 if (IncludeAllDefaultBindings || NextKey.isDirect())
1078 if (isCompatibleWithFields(K: NextKey, Fields: FieldsInSymbolicSubregions))
1079 Bindings.push_back(Elt: StoreEntry);
1080 } else if (const SubRegion *BaseSR = dyn_cast<SubRegion>(Val: Base)) {
1081 // Case 4: The next key is symbolic, but we changed a known
1082 // super-region. In this case the binding is certainly included.
1083 if (BaseSR->isSubRegionOf(R: Top))
1084 if (isCompatibleWithFields(K: NextKey, Fields: FieldsInSymbolicSubregions))
1085 Bindings.push_back(Elt: StoreEntry);
1086 }
1087 }
1088 }
1089}
1090
1091static void
1092collectSubRegionBindings(SmallVectorImpl<BindingPair> &Bindings,
1093 SValBuilder &SVB, const ClusterBindings &Cluster,
1094 const SubRegion *Top, bool IncludeAllDefaultBindings) {
1095 collectSubRegionBindings(Bindings, SVB, Cluster, Top,
1096 TopKey: BindingKey::Make(R: Top, k: BindingKey::Default),
1097 IncludeAllDefaultBindings);
1098}
1099
1100LimitedRegionBindingsRef
1101RegionStoreManager::removeSubRegionBindings(LimitedRegionBindingsConstRef B,
1102 const SubRegion *Top) {
1103 BindingKey TopKey = BindingKey::Make(R: Top, k: BindingKey::Default);
1104 const MemRegion *ClusterHead = TopKey.getBaseRegion();
1105
1106 if (Top == ClusterHead) {
1107 // We can remove an entire cluster's bindings all in one go.
1108 return B.removeCluster(BaseRegion: Top);
1109 }
1110
1111 const ClusterBindings *Cluster = B.lookup(K: ClusterHead);
1112 if (!Cluster) {
1113 // If we're invalidating a region with a symbolic offset, we need to make
1114 // sure we don't treat the base region as uninitialized anymore.
1115 if (TopKey.hasSymbolicOffset()) {
1116 const SubRegion *Concrete = TopKey.getConcreteOffsetRegion();
1117 return B.addBinding(R: Concrete, k: BindingKey::Default, V: UnknownVal());
1118 }
1119 return B;
1120 }
1121
1122 SmallVector<BindingPair, 32> Bindings;
1123 collectSubRegionBindings(Bindings, SVB&: svalBuilder, Cluster: *Cluster, Top, TopKey,
1124 /*IncludeAllDefaultBindings=*/false);
1125
1126 ClusterBindingsRef Result(*Cluster, CBFactory);
1127 for (BindingKey Key : llvm::make_first_range(c&: Bindings))
1128 Result = Result.remove(K: Key);
1129
1130 // If we're invalidating a region with a symbolic offset, we need to make sure
1131 // we don't treat the base region as uninitialized anymore.
1132 // FIXME: This isn't very precise; see the example in
1133 // collectSubRegionBindings.
1134 if (TopKey.hasSymbolicOffset()) {
1135 const SubRegion *Concrete = TopKey.getConcreteOffsetRegion();
1136 Result = Result.add(K: BindingKey::Make(R: Concrete, k: BindingKey::Default),
1137 D: UnknownVal());
1138 }
1139
1140 if (Result.isEmpty())
1141 return B.removeCluster(BaseRegion: ClusterHead);
1142 return B.addWithoutDecreasingLimit(BaseRegion: ClusterHead, BindingKeyAndValue: Result.asImmutableMap());
1143}
1144
1145namespace {
1146class InvalidateRegionsWorker : public ClusterAnalysis<InvalidateRegionsWorker>
1147{
1148 ConstCFGElementRef Elem;
1149 unsigned Count;
1150 const LocationContext *LCtx;
1151 InvalidatedSymbols &IS;
1152 RegionAndSymbolInvalidationTraits &ITraits;
1153 StoreManager::InvalidatedRegions *Regions;
1154 GlobalsFilterKind GlobalsFilter;
1155public:
1156 InvalidateRegionsWorker(RegionStoreManager &rm, ProgramStateManager &stateMgr,
1157 RegionBindingsRef b, ConstCFGElementRef elem,
1158 unsigned count, const LocationContext *lctx,
1159 InvalidatedSymbols &is,
1160 RegionAndSymbolInvalidationTraits &ITraitsIn,
1161 StoreManager::InvalidatedRegions *r,
1162 GlobalsFilterKind GFK)
1163 : ClusterAnalysis<InvalidateRegionsWorker>(rm, stateMgr, b), Elem(elem),
1164 Count(count), LCtx(lctx), IS(is), ITraits(ITraitsIn), Regions(r),
1165 GlobalsFilter(GFK) {}
1166
1167 void VisitCluster(const MemRegion *baseR, const ClusterBindings *C);
1168 void VisitBinding(SVal V);
1169
1170 using ClusterAnalysis::AddToWorkList;
1171
1172 bool AddToWorkList(const MemRegion *R);
1173
  /// Returns true if all clusters in the memory space for \p Base should
  /// be invalidated.
1176 bool includeEntireMemorySpace(const MemRegion *Base);
1177
1178 /// Returns true if the memory space of the given region is one of the global
1179 /// regions specially included at the start of invalidation.
1180 bool isInitiallyIncludedGlobalRegion(const MemRegion *R);
1181};
1182}
1183
1184bool InvalidateRegionsWorker::AddToWorkList(const MemRegion *R) {
1185 bool doNotInvalidateSuperRegion = ITraits.hasTrait(
1186 MR: R, IK: RegionAndSymbolInvalidationTraits::TK_DoNotInvalidateSuperRegion);
1187 const MemRegion *BaseR = doNotInvalidateSuperRegion ? R : R->getBaseRegion();
1188 return AddToWorkList(E: WorkListElement(BaseR), C: getCluster(R: BaseR));
1189}
1190
1191void InvalidateRegionsWorker::VisitBinding(SVal V) {
1192 // A symbol? Mark it touched by the invalidation.
1193 if (SymbolRef Sym = V.getAsSymbol())
1194 IS.insert(V: Sym);
1195
1196 if (const MemRegion *R = V.getAsRegion()) {
1197 AddToWorkList(R);
1198 return;
1199 }
1200
1201 // Is it a LazyCompoundVal? All references get invalidated as well.
1202 if (std::optional<nonloc::LazyCompoundVal> LCS =
1203 V.getAs<nonloc::LazyCompoundVal>()) {
1204
1205 // `getInterestingValues()` returns SVals contained within LazyCompoundVals,
1206 // so there is no need to visit them.
1207 for (SVal V : RM.getInterestingValues(LCV: *LCS))
1208 if (!isa<nonloc::LazyCompoundVal>(Val: V))
1209 VisitBinding(V);
1210
1211 return;
1212 }
1213}
1214
1215void InvalidateRegionsWorker::VisitCluster(const MemRegion *baseR,
1216 const ClusterBindings *C) {
1217
1218 bool PreserveRegionsContents =
1219 ITraits.hasTrait(MR: baseR,
1220 IK: RegionAndSymbolInvalidationTraits::TK_PreserveContents);
1221
1222 if (C) {
1223 for (SVal Val : llvm::make_second_range(c: *C))
1224 VisitBinding(V: Val);
1225
1226 // Invalidate regions contents.
1227 if (!PreserveRegionsContents)
1228 B = B.removeCluster(BaseRegion: baseR);
1229 }
1230
1231 if (const auto *TO = dyn_cast<TypedValueRegion>(Val: baseR)) {
1232 if (const auto *RD = TO->getValueType()->getAsCXXRecordDecl()) {
1233
1234 // Lambdas can affect all static local variables without explicitly
1235 // capturing those.
1236 // We invalidate all static locals referenced inside the lambda body.
1237 if (RD->isLambda() && RD->getLambdaCallOperator()->getBody()) {
1238 using namespace ast_matchers;
1239
1240 const char *DeclBind = "DeclBind";
1241 StatementMatcher RefToStatic = stmt(hasDescendant(declRefExpr(
1242 to(InnerMatcher: varDecl(hasStaticStorageDuration()).bind(ID: DeclBind)))));
1243 auto Matches =
1244 match(Matcher: RefToStatic, Node: *RD->getLambdaCallOperator()->getBody(),
1245 Context&: RD->getASTContext());
1246
1247 for (BoundNodes &Match : Matches) {
1248 auto *VD = Match.getNodeAs<VarDecl>(ID: DeclBind);
1249 const VarRegion *ToInvalidate =
1250 RM.getRegionManager().getVarRegion(VD, LC: LCtx);
1251 AddToWorkList(R: ToInvalidate);
1252 }
1253 }
1254 }
1255 }
1256
1257 // BlockDataRegion? If so, invalidate captured variables that are passed
1258 // by reference.
1259 if (const BlockDataRegion *BR = dyn_cast<BlockDataRegion>(Val: baseR)) {
1260 for (auto Var : BR->referenced_vars()) {
1261 const VarRegion *VR = Var.getCapturedRegion();
1262 const VarDecl *VD = VR->getDecl();
1263 if (VD->hasAttr<BlocksAttr>() || !VD->hasLocalStorage()) {
1264 AddToWorkList(R: VR);
1265 }
1266 else if (Loc::isLocType(T: VR->getValueType())) {
1267 // Map the current bindings to a Store to retrieve the value
1268 // of the binding. If that binding itself is a region, we should
1269 // invalidate that region. This is because a block may capture
        // a pointer value, but the thing pointed to by that pointer may
1271 // get invalidated.
1272 SVal V = RM.getBinding(B, L: loc::MemRegionVal(VR));
1273 if (std::optional<Loc> L = V.getAs<Loc>()) {
1274 if (const MemRegion *LR = L->getAsRegion())
1275 AddToWorkList(R: LR);
1276 }
1277 }
1278 }
1279 return;
1280 }
1281
1282 // Symbolic region?
1283 if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(Val: baseR))
1284 IS.insert(V: SR->getSymbol());
1285
  // Nothing else needs to be done when we preserve the region's contents.
1287 if (PreserveRegionsContents)
1288 return;
1289
1290 // Otherwise, we have a normal data region. Record that we touched the region.
1291 if (Regions)
1292 Regions->push_back(Elt: baseR);
1293
1294 if (isa<AllocaRegion, SymbolicRegion>(Val: baseR)) {
1295 // Invalidate the region by setting its default value to
1296 // conjured symbol. The type of the symbol is irrelevant.
1297 DefinedOrUnknownSVal V =
1298 svalBuilder.conjureSymbolVal(symbolTag: baseR, elem: Elem, LCtx, type: Ctx.IntTy, count: Count);
1299 B = B.addBinding(R: baseR, k: BindingKey::Default, V);
1300 return;
1301 }
1302
1303 if (!baseR->isBoundable())
1304 return;
1305
1306 const TypedValueRegion *TR = cast<TypedValueRegion>(Val: baseR);
1307 QualType T = TR->getValueType();
1308
1309 if (isInitiallyIncludedGlobalRegion(R: baseR)) {
1310 // If the region is a global and we are invalidating all globals,
1311 // erasing the entry is good enough. This causes all globals to be lazily
1312 // symbolicated from the same base symbol.
1313 return;
1314 }
1315
1316 if (T->isRecordType()) {
1317 // Invalidate the region by setting its default value to
1318 // conjured symbol. The type of the symbol is irrelevant.
1319 DefinedOrUnknownSVal V =
1320 svalBuilder.conjureSymbolVal(symbolTag: baseR, elem: Elem, LCtx, type: Ctx.IntTy, count: Count);
1321 B = B.addBinding(R: baseR, k: BindingKey::Default, V);
1322 return;
1323 }
1324
1325 if (const ArrayType *AT = Ctx.getAsArrayType(T)) {
1326 bool doNotInvalidateSuperRegion = ITraits.hasTrait(
1327 MR: baseR,
1328 IK: RegionAndSymbolInvalidationTraits::TK_DoNotInvalidateSuperRegion);
1329
1330 if (doNotInvalidateSuperRegion) {
      // We are not doing blank invalidation of the whole array region, so we
      // have to manually invalidate each element.
1333 std::optional<uint64_t> NumElements;
1334
1335 // Compute lower and upper offsets for region within array.
1336 if (const ConstantArrayType *CAT = dyn_cast<ConstantArrayType>(Val: AT))
1337 NumElements = CAT->getZExtSize();
1338 if (!NumElements) // We are not dealing with a constant size array
1339 goto conjure_default;
1340 QualType ElementTy = AT->getElementType();
1341 uint64_t ElemSize = Ctx.getTypeSize(T: ElementTy);
1342 const RegionOffset &RO = baseR->getAsOffset();
1343 const MemRegion *SuperR = baseR->getBaseRegion();
1344 if (RO.hasSymbolicOffset()) {
        // If the base region has a symbolic offset,
        // we revert to invalidating the super region.
1347 if (SuperR)
1348 AddToWorkList(R: SuperR);
1349 goto conjure_default;
1350 }
1351
1352 uint64_t LowerOffset = RO.getOffset();
1353 uint64_t UpperOffset = LowerOffset + *NumElements * ElemSize;
1354 bool UpperOverflow = UpperOffset < LowerOffset;
1355
1356 // Invalidate regions which are within array boundaries,
1357 // or have a symbolic offset.
1358 if (!SuperR)
1359 goto conjure_default;
1360
1361 const ClusterBindings *C = B.lookup(K: SuperR);
1362 if (!C)
1363 goto conjure_default;
1364
1365 for (const auto &[BK, V] : *C) {
1366 std::optional<uint64_t> ROffset =
1367 BK.hasSymbolicOffset() ? std::optional<uint64_t>() : BK.getOffset();
1368
1369 // Check offset is not symbolic and within array's boundaries.
1370 // Handles arrays of 0 elements and of 0-sized elements as well.
1371 if (!ROffset ||
1372 ((*ROffset >= LowerOffset && *ROffset < UpperOffset) ||
1373 (UpperOverflow &&
1374 (*ROffset >= LowerOffset || *ROffset < UpperOffset)) ||
1375 (LowerOffset == UpperOffset && *ROffset == LowerOffset))) {
1376 B = B.removeBinding(K: BK);
1377 // Bound symbolic regions need to be invalidated for dead symbol
1378 // detection.
1379 const MemRegion *R = V.getAsRegion();
1380 if (isa_and_nonnull<SymbolicRegion>(Val: R))
1381 VisitBinding(V);
1382 }
1383 }
1384 }
1385 conjure_default:
1386 // Set the default value of the array to conjured symbol.
1387 DefinedOrUnknownSVal V = svalBuilder.conjureSymbolVal(
1388 symbolTag: baseR, elem: Elem, LCtx, type: AT->getElementType(), count: Count);
1389 B = B.addBinding(R: baseR, k: BindingKey::Default, V);
1390 return;
1391 }
1392
1393 DefinedOrUnknownSVal V =
1394 svalBuilder.conjureSymbolVal(symbolTag: baseR, elem: Elem, LCtx, type: T, count: Count);
1395 assert(SymbolManager::canSymbolicate(T) || V.isUnknown());
1396 B = B.addBinding(R: baseR, k: BindingKey::Direct, V);
1397}
1398
1399bool InvalidateRegionsWorker::isInitiallyIncludedGlobalRegion(
1400 const MemRegion *R) {
1401 switch (GlobalsFilter) {
1402 case GFK_None:
1403 return false;
1404 case GFK_SystemOnly:
1405 return isa<GlobalSystemSpaceRegion>(Val: R->getRawMemorySpace());
1406 case GFK_All:
1407 return isa<NonStaticGlobalSpaceRegion>(Val: R->getRawMemorySpace());
1408 }
1409
1410 llvm_unreachable("unknown globals filter");
1411}
1412
1413bool InvalidateRegionsWorker::includeEntireMemorySpace(const MemRegion *Base) {
1414 if (isInitiallyIncludedGlobalRegion(R: Base))
1415 return true;
1416
1417 const MemSpaceRegion *MemSpace = Base->getRawMemorySpace();
1418 return ITraits.hasTrait(MR: MemSpace,
1419 IK: RegionAndSymbolInvalidationTraits::TK_EntireMemSpace);
1420}
1421
1422RegionBindingsRef RegionStoreManager::invalidateGlobalRegion(
1423 MemRegion::Kind K, ConstCFGElementRef Elem, unsigned Count,
1424 const LocationContext *LCtx, RegionBindingsRef B,
1425 InvalidatedRegions *Invalidated) {
1426 // Bind the globals memory space to a new symbol that we will use to derive
1427 // the bindings for all globals.
1428 const GlobalsSpaceRegion *GS = MRMgr.getGlobalsRegion(K);
1429 SVal V = svalBuilder.conjureSymbolVal(
1430 /* symbolTag = */ (const void *)GS, elem: Elem, LCtx,
1431 /* type does not matter */ type: Ctx.IntTy, count: Count);
1432
1433 B = B.removeBinding(R: GS)
1434 .addBinding(K: BindingKey::Make(R: GS, k: BindingKey::Default), V);
1435
1436 // Even if there are no bindings in the global scope, we still need to
1437 // record that we touched it.
1438 if (Invalidated)
1439 Invalidated->push_back(Elt: GS);
1440
1441 return B;
1442}
1443
1444void RegionStoreManager::populateWorkList(InvalidateRegionsWorker &W,
1445 ArrayRef<SVal> Values,
1446 InvalidatedRegions *TopLevelRegions) {
1447 for (SVal V : Values) {
1448 if (auto LCS = V.getAs<nonloc::LazyCompoundVal>()) {
1449 for (SVal S : getInterestingValues(LCV: *LCS))
1450 if (const MemRegion *R = S.getAsRegion())
1451 W.AddToWorkList(R);
1452
1453 continue;
1454 }
1455
1456 if (const MemRegion *R = V.getAsRegion()) {
1457 if (TopLevelRegions)
1458 TopLevelRegions->push_back(Elt: R);
1459 W.AddToWorkList(R);
1460 continue;
1461 }
1462 }
1463}
1464
1465StoreRef RegionStoreManager::invalidateRegions(
1466 Store store, ArrayRef<SVal> Values, ConstCFGElementRef Elem, unsigned Count,
1467 const LocationContext *LCtx, const CallEvent *Call, InvalidatedSymbols &IS,
1468 RegionAndSymbolInvalidationTraits &ITraits,
1469 InvalidatedRegions *TopLevelRegions, InvalidatedRegions *Invalidated) {
1470 GlobalsFilterKind GlobalsFilter;
1471 if (Call) {
1472 if (Call->isInSystemHeader())
1473 GlobalsFilter = GFK_SystemOnly;
1474 else
1475 GlobalsFilter = GFK_All;
1476 } else {
1477 GlobalsFilter = GFK_None;
1478 }
1479
1480 RegionBindingsRef B = getRegionBindings(store);
1481 InvalidateRegionsWorker W(*this, StateMgr, B, Elem, Count, LCtx, IS, ITraits,
1482 Invalidated, GlobalsFilter);
1483
1484 // Scan the bindings and generate the clusters.
1485 W.GenerateClusters();
1486
1487 // Add the regions to the worklist.
1488 populateWorkList(W, Values, TopLevelRegions);
1489
1490 W.RunWorkList();
1491
1492 // Return the new bindings.
1493 B = W.getRegionBindings();
1494
1495 // For calls, determine which global regions should be invalidated and
1496 // invalidate them. (Note that function-static and immutable globals are never
1497 // invalidated by this.)
1498 // TODO: This could possibly be more precise with modules.
1499 switch (GlobalsFilter) {
1500 case GFK_All:
1501 B = invalidateGlobalRegion(K: MemRegion::GlobalInternalSpaceRegionKind, Elem,
1502 Count, LCtx, B, Invalidated);
1503 [[fallthrough]];
1504 case GFK_SystemOnly:
1505 B = invalidateGlobalRegion(K: MemRegion::GlobalSystemSpaceRegionKind, Elem,
1506 Count, LCtx, B, Invalidated);
1507 [[fallthrough]];
1508 case GFK_None:
1509 break;
1510 }
1511
1512 return StoreRef(B.asStore(), *this);
1513}
1514
1515//===----------------------------------------------------------------------===//
1516// Location and region casting.
1517//===----------------------------------------------------------------------===//
1518
1519/// ArrayToPointer - Emulates the "decay" of an array to a pointer
1520/// type. 'Array' represents the lvalue of the array being decayed
1521/// to a pointer, and the returned SVal represents the decayed
1522/// version of that lvalue (i.e., a pointer to the first element of
1523/// the array). This is called by ExprEngine when evaluating casts
1524/// from arrays to pointers.
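/// For example (sketch): given `int arr[4];`, the lvalue `arr` decays to a
/// pointer to its first element, roughly `&Element{VarRegion{arr}, 0, int}`
/// in region notation.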
1525SVal RegionStoreManager::ArrayToPointer(Loc Array, QualType T) {
1526 if (isa<loc::ConcreteInt>(Val: Array))
1527 return Array;
1528
1529 if (!isa<loc::MemRegionVal>(Val: Array))
1530 return UnknownVal();
1531
1532 const SubRegion *R =
1533 cast<SubRegion>(Val: Array.castAs<loc::MemRegionVal>().getRegion());
1534 NonLoc ZeroIdx = svalBuilder.makeZeroArrayIndex();
1535 return loc::MemRegionVal(MRMgr.getElementRegion(elementType: T, Idx: ZeroIdx, superRegion: R, Ctx));
1536}
1537
1538//===----------------------------------------------------------------------===//
1539// Loading values from regions.
1540//===----------------------------------------------------------------------===//
1541
1542SVal RegionStoreManager::getBinding(RegionBindingsConstRef B, Loc L, QualType T) {
1543 assert(!isa<UnknownVal>(L) && "location unknown");
1544 assert(!isa<UndefinedVal>(L) && "location undefined");
1545
1546 // For access to concrete addresses, return UnknownVal. Checks
1547 // for null dereferences (and similar errors) are done by checkers, not
1548 // the Store.
1549 // FIXME: We can consider lazily symbolicating such memory, but we really
1550 // should defer this until we can reason easily about symbolicating arrays
1551 // of bytes.
1552 if (L.getAs<loc::ConcreteInt>()) {
1553 return UnknownVal();
1554 }
1555 if (!L.getAs<loc::MemRegionVal>()) {
1556 return UnknownVal();
1557 }
1558
1559 const MemRegion *MR = L.castAs<loc::MemRegionVal>().getRegion();
1560
1561 if (isa<BlockDataRegion>(Val: MR)) {
1562 return UnknownVal();
1563 }
1564
1565 // Auto-detect the binding type.
1566 if (T.isNull()) {
1567 if (const auto *TVR = dyn_cast<TypedValueRegion>(Val: MR))
1568 T = TVR->getValueType();
1569 else if (const auto *TR = dyn_cast<TypedRegion>(Val: MR))
1570 T = TR->getLocationType()->getPointeeType();
1571 else if (const auto *SR = dyn_cast<SymbolicRegion>(Val: MR))
1572 T = SR->getPointeeStaticType();
1573 }
1574 assert(!T.isNull() && "Unable to auto-detect binding type!");
1575 assert(!T->isVoidType() && "Attempting to dereference a void pointer!");
1576
1577 if (!isa<TypedValueRegion>(Val: MR))
1578 MR = GetElementZeroRegion(R: cast<SubRegion>(Val: MR), T);
1579
1580 // FIXME: Perhaps this method should just take a 'const MemRegion*' argument
1581 // instead of 'Loc', and have the other Loc cases handled at a higher level.
1582 const TypedValueRegion *R = cast<TypedValueRegion>(Val: MR);
1583 QualType RTy = R->getValueType();
1584
1585 // FIXME: we do not yet model the parts of a complex type, so treat the
1586 // whole thing as "unknown".
1587 if (RTy->isAnyComplexType())
1588 return UnknownVal();
1589
1590 // FIXME: We should eventually handle funny addressing. e.g.:
1591 //
1592 // int x = ...;
1593 // int *p = &x;
1594 // char *q = (char*) p;
1595 // char c = *q; // returns the first byte of 'x'.
1596 //
1597 // Such funny addressing will occur due to layering of regions.
1598 if (RTy->isStructureOrClassType())
1599 return getBindingForStruct(B, R);
1600
1601 // FIXME: Handle unions.
1602 if (RTy->isUnionType())
1603 return createLazyBinding(B, R);
1604
1605 if (RTy->isArrayType()) {
1606 if (RTy->isConstantArrayType())
1607 return getBindingForArray(B, R);
1608 else
1609 return UnknownVal();
1610 }
1611
1612 // FIXME: handle Vector types.
1613 if (RTy->isVectorType())
1614 return UnknownVal();
1615
1616 if (const FieldRegion* FR = dyn_cast<FieldRegion>(Val: R))
1617 return svalBuilder.evalCast(V: getBindingForField(B, R: FR), CastTy: T, OriginalTy: QualType{});
1618
1619 if (const ElementRegion* ER = dyn_cast<ElementRegion>(Val: R)) {
1620 // FIXME: Here we actually perform an implicit conversion from the loaded
1621 // value to the element type. Eventually we want to compose these values
1622 // more intelligently. For example, an 'element' can encompass multiple
1623 // bound regions (e.g., several bound bytes), or could be a subset of
1624 // a larger value.
1625 return svalBuilder.evalCast(V: getBindingForElement(B, R: ER), CastTy: T, OriginalTy: QualType{});
1626 }
1627
1628 if (const ObjCIvarRegion *IVR = dyn_cast<ObjCIvarRegion>(Val: R)) {
1629 // FIXME: Here we actually perform an implicit conversion from the loaded
1630 // value to the ivar type. What we should model is stores to ivars
1631 // that blow past the extent of the ivar. If the address of the ivar is
1632 // reinterpreted, it is possible we stored a different value that could
1633 // fit within the ivar. Either we need to cast these when storing them
1634 // or reinterpret them lazily (as we do here).
1635 return svalBuilder.evalCast(V: getBindingForObjCIvar(B, R: IVR), CastTy: T, OriginalTy: QualType{});
1636 }
1637
1638 if (const VarRegion *VR = dyn_cast<VarRegion>(Val: R)) {
1639 // FIXME: Here we actually perform an implicit conversion from the loaded
1640 // value to the variable type. What we should model is stores to variables
1641 // that blow past the extent of the variable. If the address of the
1642 // variable is reinterpreted, it is possible we stored a different value
1643 // that could fit within the variable. Either we need to cast these when
1644 // storing them or reinterpret them lazily (as we do here).
1645 return svalBuilder.evalCast(V: getBindingForVar(B, R: VR), CastTy: T, OriginalTy: QualType{});
1646 }
1647
1648 const SVal *V = B.lookup(R, k: BindingKey::Direct);
1649
1650 // Check if the region has a binding.
1651 if (V)
1652 return *V;
1653
1654 // The location does not have a bound value. This means that it has
1655 // the value it had upon its creation and/or entry to the analyzed
1656 // function/method. These are either symbolic values or 'undefined'.
1657 if (isa<StackLocalsSpaceRegion>(Val: R->getRawMemorySpace())) {
1658 // All stack variables are considered to have undefined values
1659 // upon creation. All heap allocated blocks are considered to
1660 // have undefined values as well unless they are explicitly bound
1661 // to specific values.
1662 return UndefinedVal();
1663 }
1664
1665 // All other values are symbolic.
1666 return svalBuilder.getRegionValueSymbolVal(region: R);
1667}
1668
1669static QualType getUnderlyingType(const SubRegion *R) {
1670 QualType RegionTy;
1671 if (const TypedValueRegion *TVR = dyn_cast<TypedValueRegion>(Val: R))
1672 RegionTy = TVR->getValueType();
1673
1674 if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(Val: R))
1675 RegionTy = SR->getSymbol()->getType();
1676
1677 return RegionTy;
1678}
1679
1680/// Checks to see if store \p B has a lazy binding for region \p R.
1681///
1682/// If \p AllowSubregionBindings is \c false, a lazy binding will be rejected
1683/// if there are additional bindings within \p R.
1684///
1685/// Note that unlike RegionStoreManager::findLazyBinding, this will not search
1686/// for lazy bindings for super-regions of \p R.
1687static std::optional<nonloc::LazyCompoundVal>
1688getExistingLazyBinding(SValBuilder &SVB, RegionBindingsConstRef B,
1689 const SubRegion *R, bool AllowSubregionBindings) {
1690 std::optional<SVal> V = B.getDefaultBinding(R);
1691 if (!V)
1692 return std::nullopt;
1693
1694 std::optional<nonloc::LazyCompoundVal> LCV =
1695 V->getAs<nonloc::LazyCompoundVal>();
1696 if (!LCV)
1697 return std::nullopt;
1698
1699 // If the LCV is for a subregion, the types might not match, and we shouldn't
1700 // reuse the binding.
1701 QualType RegionTy = getUnderlyingType(R);
1702 if (!RegionTy.isNull() &&
1703 !RegionTy->isVoidPointerType()) {
1704 QualType SourceRegionTy = LCV->getRegion()->getValueType();
1705 if (!SVB.getContext().hasSameUnqualifiedType(T1: RegionTy, T2: SourceRegionTy))
1706 return std::nullopt;
1707 }
1708
1709 if (!AllowSubregionBindings) {
1710 // If there are any other bindings within this region, we shouldn't reuse
1711 // the top-level binding.
1712 SmallVector<BindingPair, 16> Bindings;
1713 collectSubRegionBindings(Bindings, SVB, Cluster: *B.lookup(K: R->getBaseRegion()), Top: R,
1714 /*IncludeAllDefaultBindings=*/true);
1715 if (Bindings.size() > 1)
1716 return std::nullopt;
1717 }
1718
1719 return *LCV;
1720}
1721
1722std::pair<Store, const SubRegion *>
1723RegionStoreManager::findLazyBinding(RegionBindingsConstRef B,
1724 const SubRegion *R,
1725 const SubRegion *originalRegion) {
1726 if (originalRegion != R) {
1727 if (std::optional<nonloc::LazyCompoundVal> V =
1728 getExistingLazyBinding(SVB&: svalBuilder, B, R, AllowSubregionBindings: true))
1729 return std::make_pair(x: V->getStore(), y: V->getRegion());
1730 }
1731
1732 typedef std::pair<Store, const SubRegion *> StoreRegionPair;
1733 StoreRegionPair Result = StoreRegionPair();
1734
1735 if (const ElementRegion *ER = dyn_cast<ElementRegion>(Val: R)) {
1736 Result = findLazyBinding(B, R: cast<SubRegion>(Val: ER->getSuperRegion()),
1737 originalRegion);
1738
1739 if (Result.second)
1740 Result.second = MRMgr.getElementRegionWithSuper(ER, superRegion: Result.second);
1741
1742 } else if (const FieldRegion *FR = dyn_cast<FieldRegion>(Val: R)) {
1743 Result = findLazyBinding(B, R: cast<SubRegion>(Val: FR->getSuperRegion()),
1744 originalRegion);
1745
1746 if (Result.second)
1747 Result.second = MRMgr.getFieldRegionWithSuper(FR, superRegion: Result.second);
1748
1749 } else if (const CXXBaseObjectRegion *BaseReg =
1750 dyn_cast<CXXBaseObjectRegion>(Val: R)) {
1751 // A C++ base object region is another kind of region that we should blast
1752 // through to look for a lazy compound value. It behaves like a field region.
1753 Result = findLazyBinding(B, R: cast<SubRegion>(Val: BaseReg->getSuperRegion()),
1754 originalRegion);
1755
1756 if (Result.second)
1757 Result.second = MRMgr.getCXXBaseObjectRegionWithSuper(baseReg: BaseReg,
1758 superRegion: Result.second);
1759 }
1760
1761 return Result;
1762}
1763
1764/// This is a helper function for `getConstantValFromConstArrayInitializer`.
1765///
1766/// Return an array of extents of the declared array type.
1767///
1768/// E.g. for `int x[1][2][3];` returns { 1, 2, 3 }.
1769static SmallVector<uint64_t, 2>
1770getConstantArrayExtents(const ConstantArrayType *CAT) {
1771 assert(CAT && "ConstantArrayType should not be null");
1772 CAT = cast<ConstantArrayType>(Val: CAT->getCanonicalTypeInternal());
1773 SmallVector<uint64_t, 2> Extents;
1774 do {
1775 Extents.push_back(Elt: CAT->getZExtSize());
1776 } while ((CAT = dyn_cast<ConstantArrayType>(Val: CAT->getElementType())));
1777 return Extents;
1778}
1779
1780/// This is a helper function for `getConstantValFromConstArrayInitializer`.
1781///
1782/// Return an array of offsets from nested ElementRegions and a root base
1783 /// region. The array is never empty and the base region is never null.
1784 ///
1785 /// E.g. for `Element{Element{Element{VarRegion,1},2},3}` returns { 3, 2, 1 }.
1786/// This represents an access through indirection: `arr[1][2][3];`
1787///
1788/// \param ER The given (possibly nested) ElementRegion.
1789///
1790 /// \note The result array is in the reverse order of the indirection expression:
1791 /// arr[1][2][3] -> { 3, 2, 1 }. This keeps the complexity at O(n), where n
1792 /// is the number of indirections. It may not affect performance in real-life
1793/// code, though.
1794static std::pair<SmallVector<SVal, 2>, const MemRegion *>
1795getElementRegionOffsetsWithBase(const ElementRegion *ER) {
1796 assert(ER && "ElementRegion should not be null");
1797 const MemRegion *Base;
1798 SmallVector<SVal, 2> SValOffsets;
1799 do {
1800 SValOffsets.push_back(Elt: ER->getIndex());
1801 Base = ER->getSuperRegion();
1802 ER = dyn_cast<ElementRegion>(Val: Base);
1803 } while (ER);
1804 return {SValOffsets, Base};
1805}
1806
1807/// This is a helper function for `getConstantValFromConstArrayInitializer`.
1808///
1809/// Convert array of offsets from `SVal` to `uint64_t` in consideration of
1810/// respective array extents.
1811/// \param SrcOffsets [in] The array of offsets of type `SVal` in reversed
1812/// order (expectedly received from `getElementRegionOffsetsWithBase`).
1813/// \param ArrayExtents [in] The array of extents.
1814/// \param DstOffsets [out] The array of offsets of type `uint64_t`.
1815 /// \returns:
1816 /// - `std::nullopt` on successful conversion;
1817 /// - `UndefinedVal` or `UnknownVal` otherwise. The caller is expected to
1818 /// return this SVal as the result of the access operation.
1820///
1821/// \example:
1822/// const int arr[10][20][30] = {}; // ArrayExtents { 10, 20, 30 }
1823/// int x1 = arr[4][5][6]; // SrcOffsets { NonLoc(6), NonLoc(5), NonLoc(4) }
1824/// // DstOffsets { 4, 5, 6 }
1825/// // returns std::nullopt
1826/// int x2 = arr[42][5][-6]; // returns UndefinedVal
1827/// int x3 = arr[4][5][x2]; // returns UnknownVal
1828static std::optional<SVal>
1829convertOffsetsFromSvalToUnsigneds(const SmallVector<SVal, 2> &SrcOffsets,
1830 const SmallVector<uint64_t, 2> ArrayExtents,
1831 SmallVector<uint64_t, 2> &DstOffsets) {
1832 // Check offsets for being out of bounds.
1833 // C++20 [expr.add] 7.6.6.4 (excerpt):
1834 // If P points to an array element i of an array object x with n
1835 // elements, where i < 0 or i > n, the behavior is undefined.
1836 // Dereferencing is not allowed on the "one past the last
1837 // element", when i == n.
1838 // Example:
1839 // const int arr[3][2] = {{1, 2}, {3, 4}};
1840 // arr[0][0]; // 1
1841 // arr[0][1]; // 2
1842 // arr[0][2]; // UB
1843 // arr[1][0]; // 3
1844 // arr[1][1]; // 4
1845 // arr[1][-1]; // UB
1846 // arr[2][0]; // 0
1847 // arr[2][1]; // 0
1848 // arr[-2][0]; // UB
1849 DstOffsets.resize(N: SrcOffsets.size());
1850 auto ExtentIt = ArrayExtents.begin();
1851 auto OffsetIt = DstOffsets.begin();
1852 // Iterate `SrcOffsets` in reverse to make it consistent with `ArrayExtents`.
1853 for (SVal V : llvm::reverse(C: SrcOffsets)) {
1854 if (auto CI = V.getAs<nonloc::ConcreteInt>()) {
1855 // When the offset is out of the array's bounds, the result is UB.
1856 const llvm::APSInt &Offset = CI->getValue();
1857 if (Offset.isNegative() || Offset.uge(RHS: *(ExtentIt++)))
1858 return UndefinedVal();
1859 // Store the index; `DstOffsets` ends up in forward (outermost-first) order.
1860 *(OffsetIt++) = Offset.getZExtValue();
1861 continue;
1862 }
1863 // A symbolic index is present. Return an unknown value.
1864 // FIXME: We also need to take ElementRegions with symbolic indexes into
1865 // account.
1866 return UnknownVal();
1867 }
1868 return std::nullopt;
1869}
1870
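/// Try to fold a load from a constant array with a fully concrete index chain
/// down to the compile-time constant written in its initializer.
///
/// For example (sketch):
///   const int arr[2][3] = {{1, 2, 3}, {4, 5, 6}};
///   int x = arr[1][2]; // can be folded to the constant 6
///
/// Returns std::nullopt when the value cannot be deduced this way.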
1871std::optional<SVal> RegionStoreManager::getConstantValFromConstArrayInitializer(
1872 RegionBindingsConstRef B, const ElementRegion *R) {
1873 assert(R && "ElementRegion should not be null");
1874
1875 // Treat an n-dimensional array.
1876 SmallVector<SVal, 2> SValOffsets;
1877 const MemRegion *Base;
1878 std::tie(args&: SValOffsets, args&: Base) = getElementRegionOffsetsWithBase(ER: R);
1879 const VarRegion *VR = dyn_cast<VarRegion>(Val: Base);
1880 if (!VR)
1881 return std::nullopt;
1882
1883 assert(!SValOffsets.empty() && "getElementRegionOffsetsWithBase guarantees "
1884 "the offsets vector is not empty.");
1885
1886 // Check if the containing array has an initialized value that we can trust.
1887 // We can trust a const value or a value of a global initializer in main().
1888 const VarDecl *VD = VR->getDecl();
1889 if (!VD->getType().isConstQualified() &&
1890 !R->getElementType().isConstQualified() &&
1891 (!B.isMainAnalysis() || !VD->hasGlobalStorage()))
1892 return std::nullopt;
1893
1894 // The array's declaration should have `ConstantArrayType` type, because only
1895 // this type contains the array extent. It may happen that the array type is
1896 // an `IncompleteArrayType`. To get a declaration with `ConstantArrayType`
1897 // type, we should find the declaration in the redeclaration chain that has
1898 // the initialization expression.
1899 // NOTE: `getAnyInitializer` has an out-parameter through which it returns the
1900 // declaration from which the initializer is obtained. We replace the current
1901 // `VD` with that declaration. If the function returns null, then `VD` is not
1902 // replaced.
1903 const Expr *Init = VD->getAnyInitializer(D&: VD);
1904 // NOTE: If `Init` is non-null, then the new `VD` is guaranteed to be non-null
1905 // as well, so it is enough to check only `Init` for null.
1906 if (!Init)
1907 return std::nullopt;
1908
1909 // Array's declaration should have ConstantArrayType type, because only this
1910 // type contains an array extent.
1911 const ConstantArrayType *CAT = Ctx.getAsConstantArrayType(T: VD->getType());
1912 if (!CAT)
1913 return std::nullopt;
1914
1915 // Get array extents.
1916 SmallVector<uint64_t, 2> Extents = getConstantArrayExtents(CAT);
1917
1918 // The number of offsets should equal the number of extents; otherwise, some
1919 // wrong type punning has occurred. For instance:
1920 // int arr[1][2][3];
1921 // auto ptr = (int(*)[42])arr;
1922 // auto x = ptr[4][2]; // UB
1923 // FIXME: Should return UndefinedVal.
1924 if (SValOffsets.size() != Extents.size())
1925 return std::nullopt;
1926
1927 SmallVector<uint64_t, 2> ConcreteOffsets;
1928 if (std::optional<SVal> V = convertOffsetsFromSvalToUnsigneds(
1929 SrcOffsets: SValOffsets, ArrayExtents: Extents, DstOffsets&: ConcreteOffsets))
1930 return *V;
1931
1932 // Handle InitListExpr.
1933 // Example:
1934 // const char arr[4][2] = { { 1, 2 }, { 3 }, 4, 5 };
1935 if (const auto *ILE = dyn_cast<InitListExpr>(Val: Init))
1936 return getSValFromInitListExpr(ILE, ConcreteOffsets, ElemT: R->getElementType());
1937
1938 // Handle StringLiteral.
1939 // Example:
1940 // const char arr[] = "abc";
1941 if (const auto *SL = dyn_cast<StringLiteral>(Val: Init))
1942 return getSValFromStringLiteral(SL, Offset: ConcreteOffsets.front(),
1943 ElemT: R->getElementType());
1944
1945 // FIXME: Handle CompoundLiteralExpr.
1946
1947 return std::nullopt;
1948}
1949
1950/// Returns an SVal, if possible, for the specified position of an
1951/// initialization list.
1952///
1953/// \param ILE The given initialization list.
1954/// \param Offsets The array of unsigned offsets. E.g. for the expression
1955/// `int x = arr[1][2][3];` an array should be { 1, 2, 3 }.
1956/// \param ElemT The type of the result SVal expression.
1957/// \return Optional SVal for the particular position in the initialization
1958 /// list. E.g. for the list `{{1, 2},{3, 4},{5, 6}, {}}` offsets:
1959/// - {1, 1} returns SVal{4}, because it's the second position in the second
1960/// sublist;
1961/// - {3, 0} returns SVal{0}, because there's no explicit value at this
1962/// position in the sublist.
1963///
1964 /// NOTE: In order to get a valid SVal, the caller shall guarantee valid offsets
1965 /// for the given initialization list. Otherwise the resulting SVal may be
1966 /// equivalent to 0 or trigger an assertion.
1967std::optional<SVal> RegionStoreManager::getSValFromInitListExpr(
1968 const InitListExpr *ILE, const SmallVector<uint64_t, 2> &Offsets,
1969 QualType ElemT) {
1970 assert(ILE && "InitListExpr should not be null");
1971
1972 for (uint64_t Offset : Offsets) {
1973 // C++20 [dcl.init.string] 9.4.2.1:
1974 // An array of ordinary character type [...] can be initialized by [...]
1975 // an appropriately-typed string-literal enclosed in braces.
1976 // Example:
1977 // const char arr[] = { "abc" };
1978 if (ILE->isStringLiteralInit())
1979 if (const auto *SL = dyn_cast<StringLiteral>(Val: ILE->getInit(Init: 0)))
1980 return getSValFromStringLiteral(SL, Offset, ElemT);
1981
1982 // C++20 [expr.add] 9.4.17.5 (excerpt):
1983 // i-th array element is value-initialized for each k < i ≤ n,
1984 // where k is an expression-list size and n is an array extent.
1985 if (Offset >= ILE->getNumInits())
1986 return svalBuilder.makeZeroVal(type: ElemT);
1987
1988 const Expr *E = ILE->getInit(Init: Offset);
1989 const auto *IL = dyn_cast<InitListExpr>(Val: E);
1990 if (!IL)
1991 // Return a constant value, if one is present.
1992 // FIXME: Support other SVals.
1993 return svalBuilder.getConstantVal(E);
1994
1995 // Go to the nested initializer list.
1996 ILE = IL;
1997 }
1998
1999 assert(ILE);
2000
2001 // FIXME: Unhandled InitListExpr sub-expression, possibly constructing an
2002 // enum?
2003 return std::nullopt;
2004}
2005
2006/// Returns an SVal, if possible, for the specified position in a string
2007/// literal.
2008///
2009/// \param SL The given string literal.
2010/// \param Offset The unsigned offset. E.g. for the expression
2011/// `char x = str[42];` an offset should be 42.
2012/// E.g. for the string "abc" offset:
2013/// - 1 returns SVal{b}, because it's the second position in the string.
2014/// - 42 returns SVal{0}, because there's no explicit value at this
2015/// position in the string.
2016/// \param ElemT The type of the result SVal expression.
2017///
2018/// NOTE: We return `0` for every offset >= the literal length for array
2019/// declarations, like:
2020/// const char str[42] = "123"; // Literal length is 4.
2021/// char c = str[41]; // Offset is 41.
2022 /// FIXME: Nevertheless, we can't do the same for pointer declarations, like:
2023/// const char * const str = "123"; // Literal length is 4.
2024/// char c = str[41]; // Offset is 41. Returns `0`, but Undef
2025/// // expected.
2026/// It should be properly handled before reaching this point.
2027 /// The main problem is that we can't distinguish between these declarations,
2028 /// because in the case of an array we can get the Decl from the VarRegion, but
2029 /// in the case of a pointer the region is a StringRegion, which doesn't contain
2030 /// a Decl. A possible solution could be to pass the array extent along with the offset.
2031SVal RegionStoreManager::getSValFromStringLiteral(const StringLiteral *SL,
2032 uint64_t Offset,
2033 QualType ElemT) {
2034 assert(SL && "StringLiteral should not be null");
2035 // C++20 [dcl.init.string] 9.4.2.3:
2036 // If there are fewer initializers than there are array elements, each
2037 // element not explicitly initialized shall be zero-initialized [dcl.init].
2038 uint32_t Code = (Offset >= SL->getLength()) ? 0 : SL->getCodeUnit(i: Offset);
2039 return svalBuilder.makeIntVal(integer: Code, type: ElemT);
2040}
2041
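/// If \p SubReg is a scalar view into a larger scalar \p BaseRegion that
/// already has a direct binding, derive a value for the sub-object from the
/// parent's bound value.
///
/// For example (sketch):
///   int x = opaque();      // 'x' is bound to a conjured symbol
///   char c = *(char *)&x;  // reading element{x,0,char} yields a symbol
///                          // derived from the symbol bound to 'x'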
2042static std::optional<SVal> getDerivedSymbolForBinding(
2043 RegionBindingsConstRef B, const TypedValueRegion *BaseRegion,
2044 const TypedValueRegion *SubReg, const ASTContext &Ctx, SValBuilder &SVB) {
2045 assert(BaseRegion);
2046 QualType BaseTy = BaseRegion->getValueType();
2047 QualType Ty = SubReg->getValueType();
2048 if (BaseTy->isScalarType() && Ty->isScalarType()) {
2049 if (Ctx.getTypeSizeInChars(T: BaseTy) >= Ctx.getTypeSizeInChars(T: Ty)) {
2050 if (const std::optional<SVal> &ParentValue =
2051 B.getDirectBinding(R: BaseRegion)) {
2052 if (SymbolRef ParentValueAsSym = ParentValue->getAsSymbol())
2053 return SVB.getDerivedRegionValueSymbolVal(parentSymbol: ParentValueAsSym, region: SubReg);
2054
2055 if (ParentValue->isUndef())
2056 return UndefinedVal();
2057
2058 // Other cases: give up. We are indexing into a larger object
2059 // that has some value, but we don't know how to handle that yet.
2060 return UnknownVal();
2061 }
2062 }
2063 }
2064 return std::nullopt;
2065}
2066
2067SVal RegionStoreManager::getBindingForElement(RegionBindingsConstRef B,
2068 const ElementRegion* R) {
2069 // Check if the region has a binding.
2070 if (const std::optional<SVal> &V = B.getDirectBinding(R))
2071 return *V;
2072
2073 const MemRegion* superR = R->getSuperRegion();
2074
2075 // Check if the region is an element region of a string literal.
2076 if (const StringRegion *StrR = dyn_cast<StringRegion>(Val: superR)) {
2077 // FIXME: Handle loads from strings where the literal is treated as
2078 // an integer, e.g., *((unsigned int*)"hello"). Such loads are UB according
2079 // to C++20 7.2.1.11 [basic.lval].
2080 QualType T = Ctx.getAsArrayType(T: StrR->getValueType())->getElementType();
2081 if (!Ctx.hasSameUnqualifiedType(T1: T, T2: R->getElementType()))
2082 return UnknownVal();
2083 if (const auto CI = R->getIndex().getAs<nonloc::ConcreteInt>()) {
2084 const llvm::APSInt &Idx = CI->getValue();
2085 if (Idx < 0)
2086 return UndefinedVal();
2087 const StringLiteral *SL = StrR->getStringLiteral();
2088 return getSValFromStringLiteral(SL, Offset: Idx.getZExtValue(), ElemT: T);
2089 }
2090 } else if (isa<ElementRegion, VarRegion>(Val: superR)) {
2091 if (std::optional<SVal> V = getConstantValFromConstArrayInitializer(B, R))
2092 return *V;
2093 }
2094
2095 // Check for loads from a code text region. For such loads, just give up.
2096 if (isa<CodeTextRegion>(Val: superR))
2097 return UnknownVal();
2098
2099 // Handle the case where we are indexing into a larger scalar object.
2100 // For example, this handles:
2101 // int x = ...
2102 // char *y = &x;
2103 // return *y;
2104 // FIXME: This is a hack, and doesn't do anything really intelligent yet.
2105 const RegionRawOffset &O = R->getAsArrayOffset();
2106
2107 // If we cannot reason about the offset, return an unknown value.
2108 if (!O.getRegion())
2109 return UnknownVal();
2110
2111 if (const TypedValueRegion *baseR = dyn_cast<TypedValueRegion>(Val: O.getRegion()))
2112 if (auto V = getDerivedSymbolForBinding(B, BaseRegion: baseR, SubReg: R, Ctx, SVB&: svalBuilder))
2113 return *V;
2114
2115 return getBindingForFieldOrElementCommon(B, R, Ty: R->getElementType());
2116}
2117
2118SVal RegionStoreManager::getBindingForField(RegionBindingsConstRef B,
2119 const FieldRegion* R) {
2120
2121 // Check if the region has a binding.
2122 if (const std::optional<SVal> &V = B.getDirectBinding(R))
2123 return *V;
2124
2125 // If the containing record was initialized, try to get its constant value.
2126 const FieldDecl *FD = R->getDecl();
2127 QualType Ty = FD->getType();
2128 const MemRegion* superR = R->getSuperRegion();
2129 if (const auto *VR = dyn_cast<VarRegion>(Val: superR)) {
2130 const VarDecl *VD = VR->getDecl();
2131 QualType RecordVarTy = VD->getType();
2132 unsigned Index = FD->getFieldIndex();
2133 // Either the record variable or the field has an initializer that we can
2134 // trust. We trust initializers of constants and, additionally, respect
2135 // initializers of globals when analyzing main().
2136 if (RecordVarTy.isConstQualified() || Ty.isConstQualified() ||
2137 (B.isMainAnalysis() && VD->hasGlobalStorage()))
2138 if (const Expr *Init = VD->getAnyInitializer())
2139 if (const auto *InitList = dyn_cast<InitListExpr>(Val: Init)) {
2140 if (Index < InitList->getNumInits()) {
2141 if (const Expr *FieldInit = InitList->getInit(Init: Index))
2142 if (std::optional<SVal> V = svalBuilder.getConstantVal(E: FieldInit))
2143 return *V;
2144 } else {
2145 return svalBuilder.makeZeroVal(type: Ty);
2146 }
2147 }
2148 }
2149
2150 // Handle the case where we are accessing into a larger scalar object.
2151 // For example, this handles:
2152 // struct header {
2153 // unsigned a : 1;
2154 // unsigned b : 1;
2155 // };
2156 // struct parse_t {
2157 // unsigned bits0 : 1;
2158 // unsigned bits2 : 2; // <-- header
2159 // unsigned bits4 : 4;
2160 // };
2161 // int parse(parse_t *p) {
2162 // unsigned copy = p->bits2;
2163 // header *bits = (header *)&copy;
2164 // return bits->b; <-- here
2165 // }
2166 if (const auto *Base = dyn_cast<TypedValueRegion>(Val: R->getBaseRegion()))
2167 if (auto V = getDerivedSymbolForBinding(B, BaseRegion: Base, SubReg: R, Ctx, SVB&: svalBuilder))
2168 return *V;
2169
2170 return getBindingForFieldOrElementCommon(B, R, Ty);
2171}
2172
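/// Derive a value for region \p R from a default binding on \p superR, if one
/// exists. For example (sketch), when an opaque call leaves a conjured default
/// binding on a whole struct, reading one of its fields yields a symbol
/// derived from that default binding; a zero default binding yields zero of
/// type \p Ty.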
2173std::optional<SVal> RegionStoreManager::getBindingForDerivedDefaultValue(
2174 RegionBindingsConstRef B, const MemRegion *superR,
2175 const TypedValueRegion *R, QualType Ty) {
2176
2177 if (const std::optional<SVal> &D = B.getDefaultBinding(R: superR)) {
2178 SVal val = *D;
2179 if (SymbolRef parentSym = val.getAsSymbol())
2180 return svalBuilder.getDerivedRegionValueSymbolVal(parentSymbol: parentSym, region: R);
2181
2182 if (val.isZeroConstant())
2183 return svalBuilder.makeZeroVal(type: Ty);
2184
2185 if (val.isUnknownOrUndef())
2186 return val;
2187
2188 // Lazy bindings are usually handled through getExistingLazyBinding().
2189 // We should unify these two code paths at some point.
2190 if (isa<nonloc::LazyCompoundVal, nonloc::CompoundVal>(Val: val))
2191 return val;
2192
2193 llvm_unreachable("Unknown default value");
2194 }
2195
2196 return std::nullopt;
2197}
2198
2199SVal RegionStoreManager::getLazyBinding(const SubRegion *LazyBindingRegion,
2200 RegionBindingsRef LazyBinding) {
2201 SVal Result;
2202 if (const ElementRegion *ER = dyn_cast<ElementRegion>(Val: LazyBindingRegion))
2203 Result = getBindingForElement(B: LazyBinding, R: ER);
2204 else
2205 Result = getBindingForField(B: LazyBinding,
2206 R: cast<FieldRegion>(Val: LazyBindingRegion));
2207
2208 // FIXME: This is a hack to deal with RegionStore's inability to distinguish a
2209 // default value for /part/ of an aggregate from a default value for the
2210 // /entire/ aggregate. The most common case of this is when struct Outer
2211 // has as its first member a struct Inner, which is copied in from a stack
2212 // variable. In this case, even if the Outer's default value is symbolic, 0,
2213 // or unknown, it gets overridden by the Inner's default value of undefined.
2214 //
2215 // This is a general problem -- if the Inner is zero-initialized, the Outer
2216 // will now look zero-initialized. The proper way to solve this is with a
2217 // new version of RegionStore that tracks the extent of a binding as well
2218 // as the offset.
2219 //
2220 // This hack only takes care of the undefined case because that can very
2221 // quickly result in a warning.
2222 if (Result.isUndef())
2223 Result = UnknownVal();
2224
2225 return Result;
2226}
2227
2228SVal
2229RegionStoreManager::getBindingForFieldOrElementCommon(RegionBindingsConstRef B,
2230 const TypedValueRegion *R,
2231 QualType Ty) {
2232
2233 // At this point we have already checked in either getBindingForElement or
2234 // getBindingForField if 'R' has a direct binding.
2235
2236 // Lazy binding?
2237 Store lazyBindingStore = nullptr;
2238 const SubRegion *lazyBindingRegion = nullptr;
2239 std::tie(args&: lazyBindingStore, args&: lazyBindingRegion) = findLazyBinding(B, R, originalRegion: R);
2240 if (lazyBindingRegion)
2241 return getLazyBinding(LazyBindingRegion: lazyBindingRegion,
2242 LazyBinding: getRegionBindings(store: lazyBindingStore));
2243
2244 // Record whether or not we see a symbolic index. That can be completely
2245 // out of the scope of our lookup.
2246 bool hasSymbolicIndex = false;
2247
2248 // FIXME: This is a hack to deal with RegionStore's inability to distinguish a
2249 // default value for /part/ of an aggregate from a default value for the
2250 // /entire/ aggregate. The most common case of this is when struct Outer
2251 // has as its first member a struct Inner, which is copied in from a stack
2252 // variable. In this case, even if the Outer's default value is symbolic, 0,
2253 // or unknown, it gets overridden by the Inner's default value of undefined.
2254 //
2255 // This is a general problem -- if the Inner is zero-initialized, the Outer
2256 // will now look zero-initialized. The proper way to solve this is with a
2257 // new version of RegionStore that tracks the extent of a binding as well
2258 // as the offset.
2259 //
2260 // This hack only takes care of the undefined case because that can very
2261 // quickly result in a warning.
2262 bool hasPartialLazyBinding = false;
2263
2264 const SubRegion *SR = R;
2265 while (SR) {
2266 const MemRegion *Base = SR->getSuperRegion();
2267 if (std::optional<SVal> D =
2268 getBindingForDerivedDefaultValue(B, superR: Base, R, Ty)) {
2269 if (D->getAs<nonloc::LazyCompoundVal>()) {
2270 hasPartialLazyBinding = true;
2271 break;
2272 }
2273
2274 return *D;
2275 }
2276
2277 if (const ElementRegion *ER = dyn_cast<ElementRegion>(Val: Base)) {
2278 NonLoc index = ER->getIndex();
2279 if (!index.isConstant())
2280 hasSymbolicIndex = true;
2281 }
2282
2283 // If our super region is a field or element itself, walk up the region
2284 // hierarchy to see if there is a default value installed in an ancestor.
2285 SR = dyn_cast<SubRegion>(Val: Base);
2286 }
2287
2288 if (isa<StackLocalsSpaceRegion>(Val: R->getRawMemorySpace())) {
2289 if (isa<ElementRegion>(Val: R)) {
2290 // Currently we don't reason specially about Clang-style vectors. Check
2291 // if superR is a vector and if so return Unknown.
2292 if (const TypedValueRegion *typedSuperR =
2293 dyn_cast<TypedValueRegion>(Val: R->getSuperRegion())) {
2294 if (typedSuperR->getValueType()->isVectorType())
2295 return UnknownVal();
2296 }
2297 }
2298
2299 // FIXME: We also need to take ElementRegions with symbolic indexes into
2300 // account. This case handles both directly accessing an ElementRegion
2301 // with a symbolic offset, but also fields within an element with
2302 // a symbolic offset.
2303 if (hasSymbolicIndex)
2304 return UnknownVal();
2305
2306 // Additionally allow introspection of a block's internal layout.
2307 // Try to get a default binding if all other attempts have failed thus far.
2308 // Otherwise, return UndefinedVal().
2309 if (!hasPartialLazyBinding && !isa<BlockDataRegion>(Val: R->getBaseRegion())) {
2310 if (const std::optional<SVal> &V = B.getDefaultBinding(R))
2311 return *V;
2312 return UndefinedVal();
2313 }
2314 }
2315
2316 // All other values are symbolic.
2317 return svalBuilder.getRegionValueSymbolVal(region: R);
2318}
2319
2320SVal RegionStoreManager::getBindingForObjCIvar(RegionBindingsConstRef B,
2321 const ObjCIvarRegion* R) {
2322 // Check if the region has a binding.
2323 if (const std::optional<SVal> &V = B.getDirectBinding(R))
2324 return *V;
2325
2326 const MemRegion *superR = R->getSuperRegion();
2327
2328 // Check if the super region has a default binding.
2329 if (const std::optional<SVal> &V = B.getDefaultBinding(R: superR)) {
2330 if (SymbolRef parentSym = V->getAsSymbol())
2331 return svalBuilder.getDerivedRegionValueSymbolVal(parentSymbol: parentSym, region: R);
2332
2333 // Other cases: give up.
2334 return UnknownVal();
2335 }
2336
2337 return getBindingForLazySymbol(R);
2338}
2339
2340SVal RegionStoreManager::getBindingForVar(RegionBindingsConstRef B,
2341 const VarRegion *R) {
2342
2343 // Check if the region has a binding.
2344 if (std::optional<SVal> V = B.getDirectBinding(R))
2345 return *V;
2346
2347 if (std::optional<SVal> V = B.getDefaultBinding(R))
2348 return *V;
2349
2350 // Lazily derive a value for the VarRegion.
2351 const VarDecl *VD = R->getDecl();
2352 const MemSpaceRegion *MS = R->getRawMemorySpace();
2353
2354 // Arguments are always symbolic.
2355 if (isa<StackArgumentsSpaceRegion>(Val: MS))
2356 return svalBuilder.getRegionValueSymbolVal(region: R);
2357
2358 // Is 'VD' declared constant? If so, retrieve the constant value.
2359 if (VD->getType().isConstQualified()) {
2360 if (const Expr *Init = VD->getAnyInitializer()) {
2361 if (std::optional<SVal> V = svalBuilder.getConstantVal(E: Init))
2362 return *V;
2363
2364 // If the variable is const qualified and has an initializer but
2365 // we couldn't evaluate initializer to a value, treat the value as
2366 // unknown.
2367 return UnknownVal();
2368 }
2369 }
2370
2371 // This must come after the check for constants because closure-captured
2372 // constant variables may appear in UnknownSpaceRegion.
2373 if (isa<UnknownSpaceRegion>(Val: MS))
2374 return svalBuilder.getRegionValueSymbolVal(region: R);
2375
2376 if (isa<GlobalsSpaceRegion>(Val: MS)) {
2377 QualType T = VD->getType();
2378
2379 // If we're in main(), then global initializers have not become stale yet.
2380 if (B.isMainAnalysis())
2381 if (const Expr *Init = VD->getAnyInitializer())
2382 if (std::optional<SVal> V = svalBuilder.getConstantVal(E: Init))
2383 return *V;
2384
2385 // Function-scoped static variables are default-initialized to 0; if they
2386 // have an initializer, it would have been processed by now.
2387 // FIXME: This is only true when we're starting analysis from main().
2388 // We're losing a lot of coverage here.
2389 if (isa<StaticGlobalSpaceRegion>(Val: MS))
2390 return svalBuilder.makeZeroVal(type: T);
2391
2392 if (std::optional<SVal> V = getBindingForDerivedDefaultValue(B, superR: MS, R, Ty: T)) {
2393 assert(!V->getAs<nonloc::LazyCompoundVal>());
2394 return *V;
2395 }
2396
2397 return svalBuilder.getRegionValueSymbolVal(region: R);
2398 }
2399
2400 return UndefinedVal();
2401}
2402
2403SVal RegionStoreManager::getBindingForLazySymbol(const TypedValueRegion *R) {
2404 // All other values are symbolic.
2405 return svalBuilder.getRegionValueSymbolVal(region: R);
2406}
2407
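/// Collect the "interesting" sub-values of a LazyCompoundVal: the bound values
/// within the copied region that are neither constants nor unknown/undefined.
/// These are used, e.g., when invalidating such a copy, to find regions that
/// must be added to the invalidation worklist. For example (sketch), for a
/// copy of `struct S { int len; char *buf; }`, the pointer value bound to
/// `buf` is interesting, while a plain constant bound to `len` is not.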
2408const RegionStoreManager::SValListTy &
2409RegionStoreManager::getInterestingValues(nonloc::LazyCompoundVal LCV) {
2410 // First, check the cache.
2411 LazyBindingsMapTy::iterator I = LazyBindingsMap.find(Val: LCV.getCVData());
2412 if (I != LazyBindingsMap.end())
2413 return I->second;
2414
2415 // If we don't have a list of values cached, start constructing it.
2416 SValListTy List;
2417
2418 const SubRegion *LazyR = LCV.getRegion();
2419 RegionBindingsRef B = getRegionBindings(store: LCV.getStore());
2420
2421 // If this region had /no/ bindings at the time, there are no interesting
2422 // values to return.
2423 const ClusterBindings *Cluster = B.lookup(K: LazyR->getBaseRegion());
2424 if (!Cluster)
2425 return (LazyBindingsMap[LCV.getCVData()] = std::move(List));
2426
2427 SmallVector<BindingPair, 32> Bindings;
2428 collectSubRegionBindings(Bindings, SVB&: svalBuilder, Cluster: *Cluster, Top: LazyR,
2429 /*IncludeAllDefaultBindings=*/true);
2430 for (SVal V : llvm::make_second_range(c&: Bindings)) {
2431 if (V.isUnknownOrUndef() || V.isConstant())
2432 continue;
2433
2434 if (auto InnerLCV = V.getAs<nonloc::LazyCompoundVal>()) {
2435 const SValListTy &InnerList = getInterestingValues(LCV: *InnerLCV);
2436 llvm::append_range(C&: List, R: InnerList);
2437 }
2438
2439 List.push_back(x: V);
2440 }
2441
2442 return (LazyBindingsMap[LCV.getCVData()] = std::move(List));
2443}
2444
2445NonLoc RegionStoreManager::createLazyBinding(RegionBindingsConstRef B,
2446 const TypedValueRegion *R) {
2447 if (std::optional<nonloc::LazyCompoundVal> V =
2448 getExistingLazyBinding(SVB&: svalBuilder, B, R, AllowSubregionBindings: false))
2449 return *V;
2450
2451 return svalBuilder.makeLazyCompoundVal(store: StoreRef(B.asStore(), *this), region: R);
2452}
2453
2454SVal RegionStoreManager::getBindingForStruct(RegionBindingsConstRef B,
2455 const TypedValueRegion *R) {
2456 const RecordDecl *RD = R->getValueType()->castAs<RecordType>()->getDecl();
2457 if (!RD->getDefinition())
2458 return UnknownVal();
2459
2460 // We also create an LCV for copying empty structs because then the store
2461 // behavior doesn't depend on the struct layout.
2462 // This way even an empty struct can carry taint, regardless of whether creduce
2463 // drops the last field member or not.
2464
2465 // Try to avoid creating an LCV if it would anyway just refer to a single
2466 // default binding.
2467 if (std::optional<SVal> Val = getUniqueDefaultBinding(B, R))
2468 return *Val;
2469 return createLazyBinding(B, R);
2470}
2471
2472SVal RegionStoreManager::getBindingForArray(RegionBindingsConstRef B,
2473 const TypedValueRegion *R) {
2474 assert(Ctx.getAsConstantArrayType(R->getValueType()) &&
2475 "Only constant array types can have compound bindings.");
2476
2477 return createLazyBinding(B, R);
2478}
2479
2480bool RegionStoreManager::includedInBindings(Store store,
2481 const MemRegion *region) const {
2482 RegionBindingsRef B = getRegionBindings(store);
2483 region = region->getBaseRegion();
2484
2485 // Quick path: if the base is the head of a cluster, the region is live.
2486 if (B.lookup(K: region))
2487 return true;
2488
2489 // Slow path: if the region is the VALUE of any binding, it is live.
2490 for (RegionBindingsRef::iterator RI = B.begin(), RE = B.end(); RI != RE; ++RI) {
2491 const ClusterBindings &Cluster = RI.getData();
2492 for (ClusterBindings::iterator CI = Cluster.begin(), CE = Cluster.end();
2493 CI != CE; ++CI) {
2494 SVal D = CI.getData();
2495 if (const MemRegion *R = D.getAsRegion())
2496 if (R->getBaseRegion() == region)
2497 return true;
2498 }
2499 }
2500
2501 return false;
2502}
2503
2504//===----------------------------------------------------------------------===//
2505// Binding values to regions.
2506//===----------------------------------------------------------------------===//
2507
2508StoreRef RegionStoreManager::killBinding(Store ST, Loc L) {
2509 if (std::optional<loc::MemRegionVal> LV = L.getAs<loc::MemRegionVal>())
2510 if (const MemRegion* R = LV->getRegion())
2511 return StoreRef(getRegionBindings(store: ST)
2512 .removeBinding(R)
2513 .asImmutableMap()
2514 .getRootWithoutRetain(),
2515 *this);
2516
2517 return StoreRef(ST, *this);
2518}
2519
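/// Bind \p V to the location \p L, dispatching to the aggregate helpers when
/// the target region has array, struct, vector, or union type. For example
/// (sketch), binding an initializer to `struct S s;` goes through bindStruct,
/// while a store through a symbolic pointer first retargets the binding to
/// element 0 of the pointee region.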
2520LimitedRegionBindingsRef
2521RegionStoreManager::bind(LimitedRegionBindingsConstRef B, Loc L, SVal V) {
2522 llvm::TimeTraceScope TimeScope("RegionStoreManager::bind",
2523 [&L]() { return locDescr(L); });
2524
2525 if (B.hasExhaustedBindingLimit())
2526 return B.withValuesEscaped(V);
2527
2528 // We only care about region locations.
2529 auto MemRegVal = L.getAs<loc::MemRegionVal>();
2530 if (!MemRegVal)
2531 return B;
2532
2533 const MemRegion *R = MemRegVal->getRegion();
2534
2535 // Binding directly to a symbolic region should be treated as binding
2536 // to element 0.
2537 if (const auto *SymReg = dyn_cast<SymbolicRegion>(Val: R)) {
2538 QualType Ty = SymReg->getPointeeStaticType();
2539 if (Ty->isVoidType())
2540 Ty = StateMgr.getContext().CharTy;
2541 R = GetElementZeroRegion(R: SymReg, T: Ty);
2542 }
2543
2544 // Check if the region is a struct region.
2545 if (const TypedValueRegion* TR = dyn_cast<TypedValueRegion>(Val: R)) {
2546 QualType Ty = TR->getValueType();
2547 if (Ty->isArrayType())
2548 return bindArray(B, R: TR, V);
2549 if (Ty->isStructureOrClassType())
2550 return bindStruct(B, R: TR, V);
2551 if (Ty->isVectorType())
2552 return bindVector(B, R: TR, V);
2553 if (Ty->isUnionType())
2554 return bindAggregate(B, R: TR, DefaultVal: V);
2555 }
2556
2557 assert((!isa<CXXThisRegion>(R) || !B.lookup(R)) &&
2558 "'this' pointer is not an l-value and is not assignable");
2559
2560 // Clear out bindings that may overlap with this binding.
2561 auto NewB = removeSubRegionBindings(B, Top: cast<SubRegion>(Val: R));
2562
2563 // LazyCompoundVals should be always bound as 'default' bindings.
2564 auto KeyKind = isa<nonloc::LazyCompoundVal>(Val: V) ? BindingKey::Default
2565 : BindingKey::Direct;
2566 return NewB.addBinding(K: BindingKey::Make(R, k: KeyKind), V);
2567}
2568
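/// Record an implicit "default" value for region \p R of type \p T: a null
/// pointer for pointer types, zero for integral and enumeration types, and a
/// zero default binding for structures and arrays. For example (sketch), after
///   int arr[100] = {1, 2};
/// the elements not covered by the initializer are summarized by a single zero
/// default binding instead of 98 individual element bindings.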
2569LimitedRegionBindingsRef
2570RegionStoreManager::setImplicitDefaultValue(LimitedRegionBindingsConstRef B,
2571 const MemRegion *R, QualType T) {
2572 if (B.hasExhaustedBindingLimit())
2573 return B;
2574
2575 SVal V;
2576
2577 if (Loc::isLocType(T))
2578 V = svalBuilder.makeNullWithType(type: T);
2579 else if (T->isIntegralOrEnumerationType())
2580 V = svalBuilder.makeZeroVal(type: T);
2581 else if (T->isStructureOrClassType() || T->isArrayType()) {
2582 // Set the default value to a zero constant when it is a structure
2583 // or array. The type doesn't really matter.
2584 V = svalBuilder.makeZeroVal(type: Ctx.IntTy);
2585 }
2586 else {
2587 // We can't represent values of this type, but we still need to set a value
2588 // to record that the region has been initialized.
2589 // If this assertion ever fires, a new case should be added above -- we
2590 // should know how to default-initialize any value we can symbolicate.
2591 assert(!SymbolManager::canSymbolicate(T) && "This type is representable");
2592 V = UnknownVal();
2593 }
2594
2595 return B.addBinding(R, k: BindingKey::Default, V);
2596}
2597
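/// Try to copy a small constant-size array of scalars element by element
/// instead of installing a LazyCompoundVal for the whole copy. For example
/// (sketch), copying an aggregate whose member is `int buf[3]` binds buf[0],
/// buf[1], and buf[2] in the destination individually; arrays that are larger
/// than SmallArrayLimit, of unknown size, or of non-scalar element type fall
/// back to a lazy binding (std::nullopt is returned).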
2598std::optional<LimitedRegionBindingsRef> RegionStoreManager::tryBindSmallArray(
2599 LimitedRegionBindingsConstRef B, const TypedValueRegion *R,
2600 const ArrayType *AT, nonloc::LazyCompoundVal LCV) {
2601 if (B.hasExhaustedBindingLimit())
2602 return B.withValuesEscaped(V: LCV);
2603
2604 auto CAT = dyn_cast<ConstantArrayType>(Val: AT);
2605
2606 // If we don't know the size, create a lazyCompoundVal instead.
2607 if (!CAT)
2608 return std::nullopt;
2609
2610 QualType Ty = CAT->getElementType();
2611 if (!(Ty->isScalarType() || Ty->isReferenceType()))
2612 return std::nullopt;
2613
2614 // If the array is too big, create an LCV instead.
2615 uint64_t ArrSize = CAT->getLimitedSize();
2616 if (ArrSize > SmallArrayLimit)
2617 return std::nullopt;
2618
2619 LimitedRegionBindingsRef NewB = B;
2620
2621 for (uint64_t i = 0; i < ArrSize; ++i) {
2622 auto Idx = svalBuilder.makeArrayIndex(idx: i);
2623 const ElementRegion *SrcER =
2624 MRMgr.getElementRegion(elementType: Ty, Idx, superRegion: LCV.getRegion(), Ctx);
2625 SVal V = getBindingForElement(B: getRegionBindings(store: LCV.getStore()), R: SrcER);
2626
2627 const ElementRegion *DstER = MRMgr.getElementRegion(elementType: Ty, Idx, superRegion: R, Ctx);
2628 NewB = bind(B: NewB, L: loc::MemRegionVal(DstER), V);
2629 }
2630
2631 return NewB;
2632}
2633
2634LimitedRegionBindingsRef
2635RegionStoreManager::bindArray(LimitedRegionBindingsConstRef B,
2636 const TypedValueRegion *R, SVal Init) {
2637 llvm::TimeTraceScope TimeScope("RegionStoreManager::bindArray",
2638 [R]() { return R->getDescriptiveName(); });
2639 if (B.hasExhaustedBindingLimit())
2640 return B.withValuesEscaped(V: Init);
2641
2642 const ArrayType *AT =cast<ArrayType>(Val: Ctx.getCanonicalType(T: R->getValueType()));
2643 QualType ElementTy = AT->getElementType();
2644 std::optional<uint64_t> Size;
2645
2646 if (const ConstantArrayType* CAT = dyn_cast<ConstantArrayType>(Val: AT))
2647 Size = CAT->getZExtSize();
2648
2649 // Check if the init expr is a literal. If so, bind the rvalue instead.
2650 // FIXME: It's not the responsibility of the Store to transform this lvalue
2651 // into an rvalue. ExprEngine or maybe even CFG should do this before binding.
2652 if (std::optional<loc::MemRegionVal> MRV = Init.getAs<loc::MemRegionVal>()) {
2653 SVal V = getBinding(S: B.asStore(), L: *MRV, T: R->getValueType());
2654 return bindAggregate(B, R, DefaultVal: V);
2655 }
2656
2657 // Handle lazy compound values.
2658 if (std::optional LCV = Init.getAs<nonloc::LazyCompoundVal>()) {
2659 if (std::optional NewB = tryBindSmallArray(B, R, AT, LCV: *LCV))
2660 return *NewB;
2661
2662 return bindAggregate(B, R, DefaultVal: Init);
2663 }
2664
2665 if (Init.isUnknown())
2666 return bindAggregate(B, R, DefaultVal: UnknownVal());
2667
2668 // Remaining case: explicit compound values.
2669 const nonloc::CompoundVal& CV = Init.castAs<nonloc::CompoundVal>();
2670 nonloc::CompoundVal::iterator VI = CV.begin(), VE = CV.end();
2671 uint64_t i = 0;
2672
2673 LimitedRegionBindingsRef NewB = B;
2674
2675 for (; Size ? i < *Size : true; ++i, ++VI) {
2676 // The init list might be shorter than the array length.
2677 if (VI == VE)
2678 break;
2679 if (NewB.hasExhaustedBindingLimit())
2680 return NewB.withValuesEscaped(Begin: VI, End: VE);
2681
2682 NonLoc Idx = svalBuilder.makeArrayIndex(idx: i);
2683 const ElementRegion *ER = MRMgr.getElementRegion(elementType: ElementTy, Idx, superRegion: R, Ctx);
2684
2685 if (ElementTy->isStructureOrClassType())
2686 NewB = bindStruct(B: NewB, R: ER, V: *VI);
2687 else if (ElementTy->isArrayType())
2688 NewB = bindArray(B: NewB, R: ER, Init: *VI);
2689 else
2690 NewB = bind(B: NewB, L: loc::MemRegionVal(ER), V: *VI);
2691 }
2692
2693 // If the init list is shorter than the array length (or the array has
2694 // variable length), set the array default value. Values that are already set
2695 // are not overwritten.
2696 if (!Size || i < *Size)
2697 NewB = setImplicitDefaultValue(B: NewB, R, T: ElementTy);
2698
2699 return NewB;
2700}
2701
2702LimitedRegionBindingsRef
2703RegionStoreManager::bindVector(LimitedRegionBindingsConstRef B,
2704 const TypedValueRegion *R, SVal V) {
2705 llvm::TimeTraceScope TimeScope("RegionStoreManager::bindVector",
2706 [R]() { return R->getDescriptiveName(); });
2707 if (B.hasExhaustedBindingLimit())
2708 return B.withValuesEscaped(V);
2709
2710 QualType T = R->getValueType();
2711 const VectorType *VT = T->castAs<VectorType>(); // Use castAs for typedefs.
2712
2713 // Handle lazy compound values and symbolic values.
2714 if (isa<nonloc::LazyCompoundVal, nonloc::SymbolVal>(Val: V))
2715 return bindAggregate(B, R, DefaultVal: V);
2716
2717 // We may get a non-CompoundVal accidentally, due to imprecise cast logic or
2718 // because we are binding a symbolic struct value. Kill the field values, and
2719 // if the value is symbolic, bind it as a "default" binding.
2720 if (!isa<nonloc::CompoundVal>(Val: V)) {
2721 return bindAggregate(B, R, DefaultVal: UnknownVal());
2722 }
2723
2724 QualType ElemType = VT->getElementType();
2725 nonloc::CompoundVal CV = V.castAs<nonloc::CompoundVal>();
2726 nonloc::CompoundVal::iterator VI = CV.begin(), VE = CV.end();
2727 unsigned index = 0, numElements = VT->getNumElements();
2728 LimitedRegionBindingsRef NewB = B;
2729
2730 for ( ; index != numElements ; ++index) {
2731 if (VI == VE)
2732 break;
2733
2734 if (NewB.hasExhaustedBindingLimit())
2735 return NewB.withValuesEscaped(Begin: VI, End: VE);
2736
2737 NonLoc Idx = svalBuilder.makeArrayIndex(idx: index);
2738 const ElementRegion *ER = MRMgr.getElementRegion(elementType: ElemType, Idx, superRegion: R, Ctx);
2739
2740 if (ElemType->isArrayType())
2741 NewB = bindArray(B: NewB, R: ER, Init: *VI);
2742 else if (ElemType->isStructureOrClassType())
2743 NewB = bindStruct(B: NewB, R: ER, V: *VI);
2744 else
2745 NewB = bind(B: NewB, L: loc::MemRegionVal(ER), V: *VI);
2746 }
2747 return NewB;
2748}
2749
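/// If the cluster of base region \p R consists of a single default binding,
/// return its value. For example (sketch), a struct conjured wholesale by an
/// opaque call typically has exactly one default binding, the conjured symbol;
/// when such a struct is copied, that symbol can be rebound directly instead
/// of creating a new LazyCompoundVal (see tryBindSmallStruct).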
2750std::optional<SVal>
2751RegionStoreManager::getUniqueDefaultBinding(RegionBindingsConstRef B,
2752 const TypedValueRegion *R) const {
2753 if (R != R->getBaseRegion())
2754 return std::nullopt;
2755
2756 const auto *Cluster = B.lookup(K: R);
2757 if (!Cluster || !llvm::hasSingleElement(C: *Cluster))
2758 return std::nullopt;
2759
2760 const auto [Key, Value] = *Cluster->begin();
2761 return Key.isDirect() ? std::optional<SVal>{} : Value;
2762}
2763
2764std::optional<SVal>
2765RegionStoreManager::getUniqueDefaultBinding(nonloc::LazyCompoundVal LCV) const {
2766 auto B = getRegionBindings(store: LCV.getStore());
2767 return getUniqueDefaultBinding(B, R: LCV.getRegion());
2768}
2769
2770std::optional<LimitedRegionBindingsRef> RegionStoreManager::tryBindSmallStruct(
2771 LimitedRegionBindingsConstRef B, const TypedValueRegion *R,
2772 const RecordDecl *RD, nonloc::LazyCompoundVal LCV) {
2773 if (B.hasExhaustedBindingLimit())
2774 return B.withValuesEscaped(V: LCV);
2775
2776 // If we try to copy a Conjured value representing the value of the whole
2777 // struct, don't try to copy each field element-wise.
2778 // That would unnecessarily bind Derived symbols, each slicing the subregion
2779 // of one field out of the whole Conjured symbol.
2780 //
2781 // struct Window { int width; int height; };
2782 // Window getWindow(); <-- opaque fn.
2783 // Window w = getWindow(); <-- conjures a new Window.
2784 // Window w2 = w; <-- trivial copy "w", calling "tryBindSmallStruct"
2785 //
2786 // We should not end up with a new Store for "w2" like this:
2787 // Direct [ 0..31]: Derived{Conj{}, w.width}
2788 // Direct [32..63]: Derived{Conj{}, w.height}
2789 // Instead, we should just bind that Conjured value instead.
2790 if (std::optional<SVal> Val = getUniqueDefaultBinding(LCV)) {
2791 return B.addBinding(K: BindingKey::Make(R, k: BindingKey::Default), V: Val.value());
2792 }
2793
2794 FieldVector Fields;
2795
2796 if (const CXXRecordDecl *Class = dyn_cast<CXXRecordDecl>(Val: RD))
2797 if (Class->getNumBases() != 0 || Class->getNumVBases() != 0)
2798 return std::nullopt;
2799
2800 for (const auto *FD : RD->fields()) {
2801 if (FD->isUnnamedBitField())
2802 continue;
2803
2804 // If there are too many fields, or if any of the fields are aggregates,
2805 // just use the LCV as a default binding.
2806 if (Fields.size() == SmallStructLimit)
2807 return std::nullopt;
2808
2809 QualType Ty = FD->getType();
2810
2811 // Zero length arrays are basically no-ops, so we also ignore them here.
2812 if (Ty->isConstantArrayType() &&
2813 Ctx.getConstantArrayElementCount(CA: Ctx.getAsConstantArrayType(T: Ty)) == 0)
2814 continue;
2815
2816 if (!(Ty->isScalarType() || Ty->isReferenceType()))
2817 return std::nullopt;
2818
2819 Fields.push_back(Elt: FD);
2820 }
2821
2822 LimitedRegionBindingsRef NewB = B;
2823
2824 for (const FieldDecl *Field : Fields) {
2825 const FieldRegion *SourceFR = MRMgr.getFieldRegion(fd: Field, superRegion: LCV.getRegion());
2826 SVal V = getBindingForField(B: getRegionBindings(store: LCV.getStore()), R: SourceFR);
2827
2828 const FieldRegion *DestFR = MRMgr.getFieldRegion(fd: Field, superRegion: R);
2829 NewB = bind(B: NewB, L: loc::MemRegionVal(DestFR), V);
2830 }
2831
2832 return NewB;
2833}
2834
2835LimitedRegionBindingsRef
2836RegionStoreManager::bindStruct(LimitedRegionBindingsConstRef B,
2837 const TypedValueRegion *R, SVal V) {
2838 llvm::TimeTraceScope TimeScope("RegionStoreManager::bindStruct",
2839 [R]() { return R->getDescriptiveName(); });
2840 if (B.hasExhaustedBindingLimit())
2841 return B.withValuesEscaped(V);
2842
2843 QualType T = R->getValueType();
2844 assert(T->isStructureOrClassType());
2845
2846 const RecordType* RT = T->castAs<RecordType>();
2847 const RecordDecl *RD = RT->getDecl();
2848
2849 if (!RD->isCompleteDefinition())
2850 return B;
2851
2852 // Handle lazy compound values and symbolic values.
2853 if (std::optional<nonloc::LazyCompoundVal> LCV =
2854 V.getAs<nonloc::LazyCompoundVal>()) {
2855 if (std::optional NewB = tryBindSmallStruct(B, R, RD, LCV: *LCV))
2856 return *NewB;
2857 return bindAggregate(B, R, DefaultVal: V);
2858 }
2859 if (isa<nonloc::SymbolVal>(Val: V))
2860 return bindAggregate(B, R, DefaultVal: V);
2861
2862 // We may get a non-CompoundVal accidentally, due to imprecise cast logic or
2863 // because we are binding a symbolic struct value. Kill the field values, and
2864 // if the value is symbolic, bind it as a "default" binding.
2865 if (V.isUnknown() || !isa<nonloc::CompoundVal>(Val: V))
2866 return bindAggregate(B, R, DefaultVal: UnknownVal());

  // The raw CompoundVal is essentially a symbolic InitListExpr: an (immutable)
  // list of other values. It appears pretty much only when there's an actual
  // initializer list expression in the program, and the analyzer tries to
  // unwrap it as soon as possible.
  // This code is where such unwrap happens: when the compound value is put
  // into the object that it was supposed to initialize (it's an *initializer*
  // list, after all), instead of binding the whole value to the whole object,
  // we bind sub-values to sub-objects. Sub-values may themselves be compound
  // values, and in this case the procedure becomes recursive.
  // FIXME: The annoying part about compound values is that they don't carry
  // any sort of information about which value corresponds to which sub-object.
  // It's simply a list of values in the middle of nowhere; we expect to match
  // them to sub-objects, essentially, "by index": first value binds to
  // the first field, second value binds to the second field, etc.
  // It would have been much safer to organize non-lazy compound values as
  // a mapping from fields/bases to values.
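  // Illustrative example (hypothetical code, not taken from this file): for
  //   struct Point { int x; int y; };
  //   Point p = {1, 2};
  // the CompoundVal is just the sequence (1, 2); the first element is bound
  // to 'p.x' and the second to 'p.y' purely by position.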
  const nonloc::CompoundVal &CV = V.castAs<nonloc::CompoundVal>();
  nonloc::CompoundVal::iterator VI = CV.begin(), VE = CV.end();

  LimitedRegionBindingsRef NewB = B;

  // In C++17, aggregates may have base classes; handle those as well.
  // They appear before the fields in the initializer list / compound value.
  if (const auto *CRD = dyn_cast<CXXRecordDecl>(RD)) {
    // If the object was constructed with a constructor, its value is a
    // LazyCompoundVal. If it's a raw CompoundVal, it means that we're
    // performing aggregate initialization. The only exception from this
    // rule is sending an Objective-C++ message that returns a C++ object
    // to a nil receiver; in this case the semantics is to return a
    // zero-initialized object even if it's a C++ object that doesn't have
    // this sort of constructor; the CompoundVal is empty in this case.
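    // Illustrative example (hypothetical code, not taken from this file): for
    //   struct Base { int b; };
    //   struct Derived : Base { int d; };
    //   Derived d{{1}, 2};
    // the compound value holds ({1}, 2): the nested value is bound to the
    // CXXBaseObjectRegion for 'Base' here, and '2' to field 'd' further below.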
    assert((CRD->isAggregate() || (Ctx.getLangOpts().ObjC && VI == VE)) &&
           "Non-aggregates are constructed with a constructor!");

    for (const auto &B : CRD->bases()) {
      // (Multiple inheritance is fine though.)
      assert(!B.isVirtual() && "Aggregates cannot have virtual base classes!");

      if (VI == VE)
        break;
      if (NewB.hasExhaustedBindingLimit())
        return NewB.withValuesEscaped(VI, VE);

      QualType BTy = B.getType();
      assert(BTy->isStructureOrClassType() && "Base classes must be classes!");

      const CXXRecordDecl *BRD = BTy->getAsCXXRecordDecl();
      assert(BRD && "Base classes must be C++ classes!");

      const CXXBaseObjectRegion *BR =
          MRMgr.getCXXBaseObjectRegion(BRD, R, /*IsVirtual=*/false);

      NewB = bindStruct(NewB, BR, *VI);

      ++VI;
    }
  }

  RecordDecl::field_iterator FI, FE;

  for (FI = RD->field_begin(), FE = RD->field_end(); FI != FE; ++FI) {

    if (VI == VE)
      break;

    if (NewB.hasExhaustedBindingLimit())
      return NewB.withValuesEscaped(VI, VE);

    // Skip any unnamed bitfields to stay in sync with the initializers.
    if (FI->isUnnamedBitField())
      continue;

    QualType FTy = FI->getType();
    const FieldRegion *FR = MRMgr.getFieldRegion(*FI, R);

    if (FTy->isArrayType())
      NewB = bindArray(NewB, FR, *VI);
    else if (FTy->isStructureOrClassType())
      NewB = bindStruct(NewB, FR, *VI);
    else
      NewB = bind(NewB, loc::MemRegionVal(FR), *VI);
    ++VI;
  }

  if (NewB.hasExhaustedBindingLimit())
    return NewB.withValuesEscaped(VI, VE);

  // There may be fewer values in the initializer list than there are fields in
  // the struct; give the remaining fields a default binding of zero.
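  // Illustrative example (hypothetical code, not taken from this file): for
  //   struct S { int a, b, c; };
  //   S s = {1};
  // only 'a' receives a direct binding above; the default binding of 0 added
  // here stands in for the remaining, implicitly zero-initialized fields.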
  if (FI != FE) {
    NewB = NewB.addBinding(R, BindingKey::Default,
                           svalBuilder.makeIntVal(0, false));
  }

  return NewB;
}

LimitedRegionBindingsRef
RegionStoreManager::bindAggregate(LimitedRegionBindingsConstRef B,
                                  const TypedRegion *R, SVal Val) {
  llvm::TimeTraceScope TimeScope("RegionStoreManager::bindAggregate",
                                 [R]() { return R->getDescriptiveName(); });
  if (B.hasExhaustedBindingLimit())
    return B.withValuesEscaped(Val);

  // Remove the old bindings, using 'R' as the root of all regions
  // we will invalidate. Then add the new binding.
  return removeSubRegionBindings(B, R).addBinding(R, BindingKey::Default, Val);
}

//===----------------------------------------------------------------------===//
// State pruning.
//===----------------------------------------------------------------------===//

namespace {
class RemoveDeadBindingsWorker
    : public ClusterAnalysis<RemoveDeadBindingsWorker> {
  SmallVector<const SymbolicRegion *, 12> Postponed;
  SymbolReaper &SymReaper;
  const StackFrameContext *CurrentLCtx;

public:
  RemoveDeadBindingsWorker(RegionStoreManager &rm,
                           ProgramStateManager &stateMgr,
                           RegionBindingsRef b, SymbolReaper &symReaper,
                           const StackFrameContext *LCtx)
      : ClusterAnalysis<RemoveDeadBindingsWorker>(rm, stateMgr, b),
        SymReaper(symReaper), CurrentLCtx(LCtx) {}

  // Called by ClusterAnalysis.
  void VisitAddedToCluster(const MemRegion *baseR, const ClusterBindings &C);
  void VisitCluster(const MemRegion *baseR, const ClusterBindings *C);
  using ClusterAnalysis<RemoveDeadBindingsWorker>::VisitCluster;

  using ClusterAnalysis::AddToWorkList;

  bool AddToWorkList(const MemRegion *R);

  bool UpdatePostponed();
  void VisitBinding(SVal V);
};
} // namespace

bool RemoveDeadBindingsWorker::AddToWorkList(const MemRegion *R) {
  const MemRegion *BaseR = R->getBaseRegion();
  return AddToWorkList(WorkListElement(BaseR), getCluster(BaseR));
}

void RemoveDeadBindingsWorker::VisitAddedToCluster(const MemRegion *baseR,
                                                   const ClusterBindings &C) {

  if (const VarRegion *VR = dyn_cast<VarRegion>(baseR)) {
    if (SymReaper.isLive(VR))
      AddToWorkList(baseR, &C);

    return;
  }

  if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(baseR)) {
    if (SymReaper.isLive(SR->getSymbol()))
      AddToWorkList(SR, &C);
    else
      Postponed.push_back(SR);

    return;
  }

  if (isa<NonStaticGlobalSpaceRegion>(baseR)) {
    AddToWorkList(baseR, &C);
    return;
  }

  // CXXThisRegion in the current or parent location context is live.
  if (const CXXThisRegion *TR = dyn_cast<CXXThisRegion>(baseR)) {
    const auto *StackReg =
        cast<StackArgumentsSpaceRegion>(TR->getSuperRegion());
    const StackFrameContext *RegCtx = StackReg->getStackFrame();
    if (CurrentLCtx &&
        (RegCtx == CurrentLCtx || RegCtx->isParentOf(CurrentLCtx)))
      AddToWorkList(TR, &C);
  }
}

void RemoveDeadBindingsWorker::VisitCluster(const MemRegion *baseR,
                                            const ClusterBindings *C) {
  if (!C)
    return;

  // Mark the symbol for any SymbolicRegion with live bindings as live itself.
  // This means we should continue to track that symbol.
  if (const SymbolicRegion *SymR = dyn_cast<SymbolicRegion>(baseR))
    SymReaper.markLive(SymR->getSymbol());

  for (const auto &[Key, Val] : *C) {
    // Element index of a binding key is live.
    SymReaper.markElementIndicesLive(Key.getRegion());

    VisitBinding(Val);
  }
}

void RemoveDeadBindingsWorker::VisitBinding(SVal V) {
  // Is it a LazyCompoundVal? All referenced regions are live as well.
  // The LazyCompoundVal itself is not live but should be readable.
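  // Sketch of the intent (informal, assumed scenario): if a struct variable
  // was lazily copied from another object, reading a field of the copy later
  // must still be able to consult the original object's bindings, even though
  // the original need not be kept live as an independent value.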
  if (auto LCS = V.getAs<nonloc::LazyCompoundVal>()) {
    SymReaper.markLazilyCopied(LCS->getRegion());

    for (SVal V : RM.getInterestingValues(*LCS)) {
      if (auto DepLCS = V.getAs<nonloc::LazyCompoundVal>())
        SymReaper.markLazilyCopied(DepLCS->getRegion());
      else
        VisitBinding(V);
    }

    return;
  }

  // If V is a region, then add it to the worklist.
  if (const MemRegion *R = V.getAsRegion()) {
    AddToWorkList(R);
    SymReaper.markLive(R);

    // All regions captured by a block are also live.
    if (const BlockDataRegion *BR = dyn_cast<BlockDataRegion>(R)) {
      for (auto Var : BR->referenced_vars())
        AddToWorkList(Var.getCapturedRegion());
    }
  }

  // Update the set of live symbols.
  for (SymbolRef Sym : V.symbols())
    SymReaper.markLive(Sym);
}

bool RemoveDeadBindingsWorker::UpdatePostponed() {
  // See if any postponed SymbolicRegions are actually live now, after
  // having done a scan.
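  // Informal example of the intent: a SymbolicRegion's symbol may have looked
  // dead when its cluster was first visited, but a binding scanned later can
  // mark that symbol live again; re-checking the postponed regions here makes
  // sure their clusters get scanned as well.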
  bool Changed = false;

  for (const SymbolicRegion *SR : Postponed) {
    if (SymReaper.isLive(SR->getSymbol()))
      Changed |= AddToWorkList(SR);
  }

  return Changed;
}

StoreRef RegionStoreManager::removeDeadBindings(Store store,
                                                const StackFrameContext *LCtx,
                                                SymbolReaper &SymReaper) {
  RegionBindingsRef B = getRegionBindings(store);
  RemoveDeadBindingsWorker W(*this, StateMgr, B, SymReaper, LCtx);
  W.GenerateClusters();

  // Enqueue the region roots onto the worklist.
  for (const MemRegion *Reg : SymReaper.regions()) {
    W.AddToWorkList(Reg);
  }

  do W.RunWorkList(); while (W.UpdatePostponed());

  // We have now scanned the store, marking reachable regions and symbols as
  // live. Now remove the dead entries: any cluster whose base region was never
  // visited is unreachable, so its bindings can be dropped.
  for (const MemRegion *Base : llvm::make_first_range(B)) {
    // If the cluster has been visited, we know the region has been marked.
    // Otherwise, remove the dead entry.
    if (!W.isVisited(Base))
      B = B.removeCluster(Base);
  }

  return StoreRef(B.asStore(), *this);
}

//===----------------------------------------------------------------------===//
// Utility methods.
//===----------------------------------------------------------------------===//

void RegionStoreManager::printJson(raw_ostream &Out, Store S, const char *NL,
                                   unsigned int Space, bool IsDot) const {
  RegionBindingsRef Bindings = getRegionBindings(S);

  Indent(Out, Space, IsDot) << "\"store\": ";

  if (Bindings.isEmpty()) {
    Out << "null," << NL;
    return;
  }

  Out << "{ \"pointer\": \"" << Bindings.asStore() << "\", \"items\": [" << NL;
  Bindings.printJson(Out, NL, Space + 1, IsDot);
  Indent(Out, Space, IsDot) << "]}," << NL;
}
