1//== RegionStore.cpp - Field-sensitive store model --------------*- C++ -*--==//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8//
9// This file defines a basic region store model. In this model, we do have field
10// sensitivity. But we assume nothing about the heap shape. So recursive data
11// structures are largely ignored. Basically we do 1-limiting analysis.
12// Parameter pointers are assumed with no aliasing. Pointee objects of
13// parameters are created lazily.
14//
15//===----------------------------------------------------------------------===//
16
17#include "clang/AST/Attr.h"
18#include "clang/AST/CharUnits.h"
19#include "clang/ASTMatchers/ASTMatchFinder.h"
20#include "clang/Analysis/AnalysisDeclContext.h"
21#include "clang/Basic/JsonSupport.h"
22#include "clang/Basic/TargetInfo.h"
23#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
24#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
25#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
26#include "clang/StaticAnalyzer/Core/PathSensitive/MemRegion.h"
27#include "clang/StaticAnalyzer/Core/PathSensitive/ProgramState.h"
28#include "llvm/ADT/ImmutableMap.h"
29#include "llvm/ADT/STLExtras.h"
30#include "llvm/Support/TimeProfiler.h"
31#include "llvm/Support/raw_ostream.h"
32#include <limits>
33#include <optional>
34#include <utility>
35
36using namespace clang;
37using namespace ento;
38
39//===----------------------------------------------------------------------===//
40// Representation of binding keys.
41//===----------------------------------------------------------------------===//
42
43namespace {
44class BindingKey {
45public:
46 enum Kind {
47 Default = 0x0,
48 Direct = 0x1,
49 Symbolic = 0x2,
50 };
51
52private:
53 llvm::PointerIntPair<const MemRegion *, 2> P;
54 uint64_t Data;
55
56 /// Create a key for a binding to region \p r, which has a symbolic offset
57 /// from region \p Base.
58 explicit BindingKey(const SubRegion *r, const SubRegion *Base, Kind k)
59 : P(r, k | Symbolic), Data(reinterpret_cast<uintptr_t>(Base)) {
60 assert(r && Base && "Must have known regions.");
61 assert(getConcreteOffsetRegion() == Base && "Failed to store base region");
62 }
63
64 /// Create a key for a binding at \p offset from base region \p r.
65 explicit BindingKey(const MemRegion *r, uint64_t offset, Kind k)
66 : P(r, k), Data(offset) {
67 assert(r && "Must have known regions.");
68 assert(getOffset() == offset && "Failed to store offset");
69 assert((r == r->getBaseRegion() ||
70 isa<ObjCIvarRegion, CXXDerivedObjectRegion>(r)) &&
71 "Not a base");
72 }
73
74public:
75 bool isDirect() const { return P.getInt() & Direct; }
76 bool isDefault() const { return !isDirect(); }
77 bool hasSymbolicOffset() const { return P.getInt() & Symbolic; }
78
79 const MemRegion *getRegion() const { return P.getPointer(); }
80 uint64_t getOffset() const {
81 assert(!hasSymbolicOffset());
82 return Data;
83 }
84
85 const SubRegion *getConcreteOffsetRegion() const {
86 assert(hasSymbolicOffset());
87 return reinterpret_cast<const SubRegion *>(static_cast<uintptr_t>(Data));
88 }
89
90 const MemRegion *getBaseRegion() const {
91 if (hasSymbolicOffset())
92 return getConcreteOffsetRegion()->getBaseRegion();
93 return getRegion()->getBaseRegion();
94 }
95
96 void Profile(llvm::FoldingSetNodeID& ID) const {
97 ID.AddPointer(Ptr: P.getOpaqueValue());
98 ID.AddInteger(I: Data);
99 }
100
101 static BindingKey Make(const MemRegion *R, Kind k);
102
103 bool operator<(const BindingKey &X) const {
104 if (P.getOpaqueValue() < X.P.getOpaqueValue())
105 return true;
106 if (P.getOpaqueValue() > X.P.getOpaqueValue())
107 return false;
108 return Data < X.Data;
109 }
110
111 bool operator==(const BindingKey &X) const {
112 return P.getOpaqueValue() == X.P.getOpaqueValue() &&
113 Data == X.Data;
114 }
115
116 LLVM_DUMP_METHOD void dump() const;
117};
118
119std::string locDescr(Loc L) {
120 std::string S;
121 llvm::raw_string_ostream OS(S);
122 L.dumpToStream(Out&: OS);
123 return OS.str();
124}
125} // end anonymous namespace
126
127BindingKey BindingKey::Make(const MemRegion *R, Kind k) {
128 const RegionOffset &RO = R->getAsOffset();
129 if (RO.hasSymbolicOffset())
130 return BindingKey(cast<SubRegion>(Val: R), cast<SubRegion>(Val: RO.getRegion()), k);
131
132 return BindingKey(RO.getRegion(), RO.getOffset(), k);
133}
134
135namespace llvm {
136static inline raw_ostream &operator<<(raw_ostream &Out, BindingKey K) {
137 Out << "\"kind\": \"" << (K.isDirect() ? "Direct" : "Default")
138 << "\", \"offset\": ";
139
140 if (!K.hasSymbolicOffset())
141 Out << K.getOffset();
142 else
143 Out << "null";
144
145 return Out;
146}
147
148} // namespace llvm
149
150#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
151void BindingKey::dump() const { llvm::errs() << *this; }
152#endif
153
154//===----------------------------------------------------------------------===//
155// Actual Store type.
156//===----------------------------------------------------------------------===//
157
158typedef llvm::ImmutableMap<BindingKey, SVal> ClusterBindings;
159typedef llvm::ImmutableMapRef<BindingKey, SVal> ClusterBindingsRef;
160typedef std::pair<BindingKey, SVal> BindingPair;
161
162typedef llvm::ImmutableMap<const MemRegion *, ClusterBindings>
163 RegionBindings;
164
165namespace {
166class RegionBindingsRef : public llvm::ImmutableMapRef<const MemRegion *,
167 ClusterBindings> {
168 ClusterBindings::Factory *CBFactory;
169
170 // This flag indicates whether the current bindings are within the analysis
171 // that has started from main(). It affects how we perform loads from
172 // global variables that have initializers: if we have observed the
173 // program execution from the start and we know that these variables
174 // have not been overwritten yet, we can be sure that their initializers
175 // are still relevant. This flag never gets changed when the bindings are
176 // updated, so it could potentially be moved into RegionStoreManager
177 // (as if it's the same bindings but a different loading procedure)
178 // however that would have made the manager needlessly stateful.
179 bool IsMainAnalysis;
180
181public:
182 typedef llvm::ImmutableMapRef<const MemRegion *, ClusterBindings>
183 ParentTy;
184
185 RegionBindingsRef(ClusterBindings::Factory &CBFactory,
186 const RegionBindings::TreeTy *T,
187 RegionBindings::TreeTy::Factory *F, bool IsMainAnalysis)
188 : RegionBindingsRef(ParentTy(T, F), CBFactory, IsMainAnalysis) {}
189
190 RegionBindingsRef(const ParentTy &P, ClusterBindings::Factory &CBFactory,
191 bool IsMainAnalysis)
192 : ParentTy(P), CBFactory(&CBFactory), IsMainAnalysis(IsMainAnalysis) {}
193
194 RegionBindingsRef removeCluster(const MemRegion *BaseRegion) const {
195 return RegionBindingsRef(ParentTy::remove(K: BaseRegion), *CBFactory,
196 IsMainAnalysis);
197 }
198
199 RegionBindingsRef addBinding(BindingKey K, SVal V) const;
200
201 RegionBindingsRef addBinding(const MemRegion *R,
202 BindingKey::Kind k, SVal V) const;
203
204 const SVal *lookup(BindingKey K) const;
205 const SVal *lookup(const MemRegion *R, BindingKey::Kind k) const;
206 using llvm::ImmutableMapRef<const MemRegion *, ClusterBindings>::lookup;
207
208 RegionBindingsRef removeBinding(BindingKey K);
209
210 RegionBindingsRef removeBinding(const MemRegion *R,
211 BindingKey::Kind k);
212
213 RegionBindingsRef removeBinding(const MemRegion *R) {
214 return removeBinding(R, k: BindingKey::Direct).
215 removeBinding(R, k: BindingKey::Default);
216 }
217
218 std::optional<SVal> getDirectBinding(const MemRegion *R) const;
219
220 /// getDefaultBinding - Returns an SVal* representing an optional default
221 /// binding associated with a region and its subregions.
222 std::optional<SVal> getDefaultBinding(const MemRegion *R) const;
223
224 /// Return the internal tree as a Store.
225 Store asStore() const {
226 llvm::PointerIntPair<Store, 1, bool> Ptr = {
227 asImmutableMap().getRootWithoutRetain(), IsMainAnalysis};
228 return reinterpret_cast<Store>(Ptr.getOpaqueValue());
229 }
230
231 bool isMainAnalysis() const {
232 return IsMainAnalysis;
233 }
234
235 void printJson(raw_ostream &Out, const char *NL = "\n",
236 unsigned int Space = 0, bool IsDot = false) const {
237 using namespace llvm;
238 DenseMap<const MemRegion *, std::string> StringifyCache;
239 auto ToString = [&StringifyCache](const MemRegion *R) {
240 auto [Place, Inserted] = StringifyCache.try_emplace(Key: R);
241 if (!Inserted)
242 return Place->second;
243 std::string Res;
244 raw_string_ostream OS(Res);
245 OS << R;
246 Place->second = Res;
247 return Res;
248 };
249
250 using Cluster =
251 std::pair<const MemRegion *, ImmutableMap<BindingKey, SVal>>;
252 using Binding = std::pair<BindingKey, SVal>;
253
254 const auto MemSpaceBeforeRegionName = [&ToString](const Cluster *L,
255 const Cluster *R) {
256 if (isa<MemSpaceRegion>(Val: L->first) && !isa<MemSpaceRegion>(Val: R->first))
257 return true;
258 if (!isa<MemSpaceRegion>(Val: L->first) && isa<MemSpaceRegion>(Val: R->first))
259 return false;
260 return ToString(L->first) < ToString(R->first);
261 };
262
263 const auto SymbolicBeforeOffset = [&ToString](const BindingKey &L,
264 const BindingKey &R) {
265 if (L.hasSymbolicOffset() && !R.hasSymbolicOffset())
266 return true;
267 if (!L.hasSymbolicOffset() && R.hasSymbolicOffset())
268 return false;
269 if (L.hasSymbolicOffset() && R.hasSymbolicOffset())
270 return ToString(L.getRegion()) < ToString(R.getRegion());
271 return L.getOffset() < R.getOffset();
272 };
273
274 const auto DefaultBindingBeforeDirectBindings =
275 [&SymbolicBeforeOffset](const Binding *LPtr, const Binding *RPtr) {
276 const BindingKey &L = LPtr->first;
277 const BindingKey &R = RPtr->first;
278 if (L.isDefault() && !R.isDefault())
279 return true;
280 if (!L.isDefault() && R.isDefault())
281 return false;
282 assert(L.isDefault() == R.isDefault());
283 return SymbolicBeforeOffset(L, R);
284 };
285
286 const auto AddrOf = [](const auto &Item) { return &Item; };
287
288 std::vector<const Cluster *> SortedClusters;
289 SortedClusters.reserve(n: std::distance(first: begin(), last: end()));
290 append_range(C&: SortedClusters, R: map_range(C: *this, F: AddrOf));
291 llvm::sort(C&: SortedClusters, Comp: MemSpaceBeforeRegionName);
292
293 for (auto [Idx, C] : llvm::enumerate(First&: SortedClusters)) {
294 const auto &[BaseRegion, Bindings] = *C;
295 Indent(Out, Space, IsDot)
296 << "{ \"cluster\": \"" << BaseRegion << "\", \"pointer\": \""
297 << (const void *)BaseRegion << "\", \"items\": [" << NL;
298
299 std::vector<const Binding *> SortedBindings;
300 SortedBindings.reserve(n: std::distance(first: Bindings.begin(), last: Bindings.end()));
301 append_range(C&: SortedBindings, R: map_range(C: Bindings, F: AddrOf));
302 llvm::sort(C&: SortedBindings, Comp: DefaultBindingBeforeDirectBindings);
303
304 ++Space;
305 for (auto [Idx, B] : llvm::enumerate(First&: SortedBindings)) {
306 const auto &[Key, Value] = *B;
307 Indent(Out, Space, IsDot) << "{ " << Key << ", \"value\": ";
308 Value.printJson(Out, /*AddQuotes=*/true);
309 Out << " }";
310 if (Idx != SortedBindings.size() - 1)
311 Out << ',';
312 Out << NL;
313 }
314 --Space;
315 Indent(Out, Space, IsDot) << "]}";
316 if (Idx != SortedClusters.size() - 1)
317 Out << ',';
318 Out << NL;
319 }
320 }
321
322 LLVM_DUMP_METHOD void dump() const { printJson(Out&: llvm::errs()); }
323
324protected:
325 RegionBindingsRef
326 commitBindingsToCluster(const MemRegion *BaseRegion,
327 const ClusterBindings &Bindings) const;
328};
329} // end anonymous namespace
330
331/// This class represents the same as \c RegionBindingsRef, but with a limit on
332/// the number of bindings that can be added.
333class LimitedRegionBindingsRef : public RegionBindingsRef {
334public:
335 LimitedRegionBindingsRef(RegionBindingsRef Base,
336 SmallVectorImpl<SVal> &EscapedValuesDuringBind,
337 std::optional<unsigned> BindingsLeft)
338 : RegionBindingsRef(Base),
339 EscapedValuesDuringBind(&EscapedValuesDuringBind),
340 BindingsLeft(BindingsLeft) {}
341
342 bool hasExhaustedBindingLimit() const {
343 return BindingsLeft.has_value() && BindingsLeft.value() == 0;
344 }
345
346 LimitedRegionBindingsRef withValuesEscaped(SVal V) const {
347 EscapedValuesDuringBind->push_back(Elt: V);
348 return *this;
349 }
350
351 LimitedRegionBindingsRef
352 withValuesEscaped(nonloc::CompoundVal::iterator Begin,
353 nonloc::CompoundVal::iterator End) const {
354 for (SVal V : llvm::make_range(x: Begin, y: End))
355 withValuesEscaped(V);
356 return *this;
357 }
358
359 LimitedRegionBindingsRef
360 addWithoutDecreasingLimit(const MemRegion *BaseRegion,
361 data_type_ref BindingKeyAndValue) const {
362 return LimitedRegionBindingsRef{RegionBindingsRef::commitBindingsToCluster(
363 BaseRegion, Bindings: BindingKeyAndValue),
364 *EscapedValuesDuringBind, BindingsLeft};
365 }
366
367 LimitedRegionBindingsRef removeCluster(const MemRegion *BaseRegion) const {
368 return LimitedRegionBindingsRef{
369 RegionBindingsRef::removeCluster(BaseRegion), *EscapedValuesDuringBind,
370 BindingsLeft};
371 }
372
373 LimitedRegionBindingsRef addBinding(BindingKey K, SVal V) const {
374 std::optional<unsigned> NewBindingsLeft = BindingsLeft;
375 if (NewBindingsLeft.has_value()) {
376 assert(NewBindingsLeft.value() != 0);
377 NewBindingsLeft.value() -= 1;
378
379 // If we just exhausted the binding limit, highjack
380 // this bind call for the default binding.
381 if (NewBindingsLeft.value() == 0) {
382 withValuesEscaped(V);
383 K = BindingKey::Make(R: K.getRegion(), k: BindingKey::Default);
384 V = UnknownVal();
385 }
386 }
387
388 return LimitedRegionBindingsRef{RegionBindingsRef::addBinding(K, V),
389 *EscapedValuesDuringBind, NewBindingsLeft};
390 }
391
392 LimitedRegionBindingsRef addBinding(const MemRegion *R, BindingKey::Kind k,
393 SVal V) const {
394 return addBinding(K: BindingKey::Make(R, k), V);
395 }
396
397private:
398 SmallVectorImpl<SVal> *EscapedValuesDuringBind; // nonnull
399 std::optional<unsigned> BindingsLeft;
400};
401
402typedef const RegionBindingsRef& RegionBindingsConstRef;
403typedef const LimitedRegionBindingsRef &LimitedRegionBindingsConstRef;
404
405std::optional<SVal>
406RegionBindingsRef::getDirectBinding(const MemRegion *R) const {
407 const SVal *V = lookup(R, k: BindingKey::Direct);
408 return V ? std::optional<SVal>(*V) : std::nullopt;
409}
410
411std::optional<SVal>
412RegionBindingsRef::getDefaultBinding(const MemRegion *R) const {
413 const SVal *V = lookup(R, k: BindingKey::Default);
414 return V ? std::optional<SVal>(*V) : std::nullopt;
415}
416
417RegionBindingsRef RegionBindingsRef::commitBindingsToCluster(
418 const MemRegion *BaseRegion, const ClusterBindings &Bindings) const {
419 return RegionBindingsRef(ParentTy::add(K: BaseRegion, D: Bindings), *CBFactory,
420 IsMainAnalysis);
421}
422
423RegionBindingsRef RegionBindingsRef::addBinding(BindingKey K, SVal V) const {
424 const MemRegion *Base = K.getBaseRegion();
425
426 const ClusterBindings *ExistingCluster = lookup(K: Base);
427 ClusterBindings Bindings =
428 (ExistingCluster ? *ExistingCluster : CBFactory->getEmptyMap());
429 Bindings = CBFactory->add(Old: Bindings, K, D: V);
430 return commitBindingsToCluster(BaseRegion: Base, Bindings);
431}
432
433RegionBindingsRef RegionBindingsRef::addBinding(const MemRegion *R,
434 BindingKey::Kind k,
435 SVal V) const {
436 return addBinding(K: BindingKey::Make(R, k), V);
437}
438
439const SVal *RegionBindingsRef::lookup(BindingKey K) const {
440 const ClusterBindings *Cluster = lookup(K: K.getBaseRegion());
441 if (!Cluster)
442 return nullptr;
443 return Cluster->lookup(K);
444}
445
446const SVal *RegionBindingsRef::lookup(const MemRegion *R,
447 BindingKey::Kind k) const {
448 return lookup(K: BindingKey::Make(R, k));
449}
450
451RegionBindingsRef RegionBindingsRef::removeBinding(BindingKey K) {
452 const MemRegion *Base = K.getBaseRegion();
453 const ClusterBindings *Cluster = lookup(K: Base);
454 if (!Cluster)
455 return *this;
456
457 ClusterBindings NewCluster = CBFactory->remove(Old: *Cluster, K);
458 if (NewCluster.isEmpty())
459 return removeCluster(BaseRegion: Base);
460 return commitBindingsToCluster(BaseRegion: Base, Bindings: NewCluster);
461}
462
463RegionBindingsRef RegionBindingsRef::removeBinding(const MemRegion *R,
464 BindingKey::Kind k){
465 return removeBinding(K: BindingKey::Make(R, k));
466}
467
468//===----------------------------------------------------------------------===//
469// Main RegionStore logic.
470//===----------------------------------------------------------------------===//
471
472namespace {
473class InvalidateRegionsWorker;
474
475class RegionStoreManager : public StoreManager {
476public:
477 RegionBindings::Factory RBFactory;
478 mutable ClusterBindings::Factory CBFactory;
479
480 typedef std::vector<SVal> SValListTy;
481private:
482 typedef llvm::DenseMap<const LazyCompoundValData *,
483 SValListTy> LazyBindingsMapTy;
484 LazyBindingsMapTy LazyBindingsMap;
485
486 /// The largest number of fields a struct can have and still be
487 /// considered "small".
488 ///
489 /// This is currently used to decide whether or not it is worth "forcing" a
490 /// LazyCompoundVal on bind.
491 ///
492 /// This is controlled by 'region-store-small-struct-limit' option.
493 /// To disable all small-struct-dependent behavior, set the option to "0".
494 const unsigned SmallStructLimit;
495
496 /// The largest number of element an array can have and still be
497 /// considered "small".
498 ///
499 /// This is currently used to decide whether or not it is worth "forcing" a
500 /// LazyCompoundVal on bind.
501 ///
502 /// This is controlled by 'region-store-small-struct-limit' option.
503 /// To disable all small-struct-dependent behavior, set the option to "0".
504 const unsigned SmallArrayLimit;
505
506 /// The number of bindings a single bind operation can scatter into.
507 /// For example, binding the initializer-list of an array would recurse and
508 /// bind all the individual array elements, potentially causing scalability
509 /// issues. Nullopt if the limit is disabled.
510 const std::optional<unsigned> RegionStoreMaxBindingFanOutPlusOne;
511
512 /// A helper used to populate the work list with the given set of
513 /// regions.
514 void populateWorkList(InvalidateRegionsWorker &W,
515 ArrayRef<SVal> Values,
516 InvalidatedRegions *TopLevelRegions);
517
518 const AnalyzerOptions &getOptions() {
519 return StateMgr.getOwningEngine().getAnalysisManager().options;
520 }
521
522public:
523 RegionStoreManager(ProgramStateManager &mgr)
524 : StoreManager(mgr), RBFactory(mgr.getAllocator()),
525 CBFactory(mgr.getAllocator()),
526 SmallStructLimit(getOptions().RegionStoreSmallStructLimit),
527 SmallArrayLimit(getOptions().RegionStoreSmallArrayLimit),
528 RegionStoreMaxBindingFanOutPlusOne([&]() -> std::optional<unsigned> {
529 unsigned FanOut = getOptions().RegionStoreMaxBindingFanOut;
530 assert(FanOut != std::numeric_limits<unsigned>::max());
531 if (FanOut == 0)
532 return std::nullopt;
533 return FanOut + 1 /*for the default binding*/;
534 }()) {}
535
536 /// setImplicitDefaultValue - Set the default binding for the provided
537 /// MemRegion to the value implicitly defined for compound literals when
538 /// the value is not specified.
539 LimitedRegionBindingsRef
540 setImplicitDefaultValue(LimitedRegionBindingsConstRef B, const MemRegion *R,
541 QualType T);
542
543 /// ArrayToPointer - Emulates the "decay" of an array to a pointer
544 /// type. 'Array' represents the lvalue of the array being decayed
545 /// to a pointer, and the returned SVal represents the decayed
546 /// version of that lvalue (i.e., a pointer to the first element of
547 /// the array). This is called by ExprEngine when evaluating
548 /// casts from arrays to pointers.
549 SVal ArrayToPointer(Loc Array, QualType ElementTy) override;
550
551 /// Creates the Store that correctly represents memory contents before
552 /// the beginning of the analysis of the given top-level stack frame.
553 StoreRef getInitialStore(const LocationContext *InitLoc) override {
554 bool IsMainAnalysis = false;
555 if (const auto *FD = dyn_cast<FunctionDecl>(Val: InitLoc->getDecl()))
556 IsMainAnalysis = FD->isMain() && !Ctx.getLangOpts().CPlusPlus;
557 return StoreRef(RegionBindingsRef(RegionBindingsRef::ParentTy(
558 RBFactory.getEmptyMap(), RBFactory),
559 CBFactory, IsMainAnalysis)
560 .asStore(),
561 *this);
562 }
563
564 //===-------------------------------------------------------------------===//
565 // Binding values to regions.
566 //===-------------------------------------------------------------------===//
567 RegionBindingsRef
568 invalidateGlobalRegion(MemRegion::Kind K, ConstCFGElementRef Elem,
569 unsigned Count, const LocationContext *LCtx,
570 RegionBindingsRef B, InvalidatedRegions *Invalidated);
571
572 StoreRef invalidateRegions(Store store, ArrayRef<SVal> Values,
573 ConstCFGElementRef Elem, unsigned Count,
574 const LocationContext *LCtx, const CallEvent *Call,
575 InvalidatedSymbols &IS,
576 RegionAndSymbolInvalidationTraits &ITraits,
577 InvalidatedRegions *Invalidated,
578 InvalidatedRegions *InvalidatedTopLevel) override;
579
580 bool scanReachableSymbols(Store S, const MemRegion *R,
581 ScanReachableSymbols &Callbacks) override;
582
583 LimitedRegionBindingsRef
584 removeSubRegionBindings(LimitedRegionBindingsConstRef B, const SubRegion *R);
585 std::optional<SVal>
586 getConstantValFromConstArrayInitializer(RegionBindingsConstRef B,
587 const ElementRegion *R);
588 std::optional<SVal>
589 getSValFromInitListExpr(const InitListExpr *ILE,
590 const SmallVector<uint64_t, 2> &ConcreteOffsets,
591 QualType ElemT);
592 SVal getSValFromStringLiteral(const StringLiteral *SL, uint64_t Offset,
593 QualType ElemT);
594
595public: // Part of public interface to class.
596 BindResult Bind(Store store, Loc LV, SVal V) override {
597 llvm::SmallVector<SVal, 0> EscapedValuesDuringBind;
598 LimitedRegionBindingsRef BoundedBindings =
599 getRegionBindings(store, EscapedValuesDuringBind);
600 return BindResult{.ResultingStore: StoreRef(bind(B: BoundedBindings, LV, V).asStore(), *this),
601 .FailedToBindValues: std::move(EscapedValuesDuringBind)};
602 }
603
604 LimitedRegionBindingsRef bind(LimitedRegionBindingsConstRef B, Loc LV,
605 SVal V);
606
607 // BindDefaultInitial is only used to initialize a region with
608 // a default value.
609 BindResult BindDefaultInitial(Store store, const MemRegion *R,
610 SVal V) override {
611 RegionBindingsRef B = getRegionBindings(store);
612 // Use other APIs when you have to wipe the region that was initialized
613 // earlier.
614 assert(!(B.getDefaultBinding(R) || B.getDirectBinding(R)) &&
615 "Double initialization!");
616 B = B.addBinding(K: BindingKey::Make(R, k: BindingKey::Default), V);
617 return BindResult{
618 .ResultingStore: StoreRef(B.asImmutableMap().getRootWithoutRetain(), *this), .FailedToBindValues: {}};
619 }
620
621 // BindDefaultZero is used for zeroing constructors that may accidentally
622 // overwrite existing bindings.
623 BindResult BindDefaultZero(Store store, const MemRegion *R) override {
624 // FIXME: The offsets of empty bases can be tricky because of
625 // of the so called "empty base class optimization".
626 // If a base class has been optimized out
627 // we should not try to create a binding, otherwise we should.
628 // Unfortunately, at the moment ASTRecordLayout doesn't expose
629 // the actual sizes of the empty bases
630 // and trying to infer them from offsets/alignments
631 // seems to be error-prone and non-trivial because of the trailing padding.
632 // As a temporary mitigation we don't create bindings for empty bases.
633 if (const auto *BR = dyn_cast<CXXBaseObjectRegion>(Val: R))
634 if (BR->getDecl()->isEmpty())
635 return BindResult{.ResultingStore: StoreRef(store, *this), .FailedToBindValues: {}};
636
637 llvm::SmallVector<SVal, 0> EscapedValuesDuringBind;
638 LimitedRegionBindingsRef B =
639 getRegionBindings(store, EscapedValuesDuringBind);
640 SVal V = svalBuilder.makeZeroVal(type: Ctx.CharTy);
641 B = removeSubRegionBindings(B, R: cast<SubRegion>(Val: R));
642 B = B.addBinding(K: BindingKey::Make(R, k: BindingKey::Default), V);
643 return BindResult{
644 .ResultingStore: StoreRef(B.asImmutableMap().getRootWithoutRetain(), *this),
645 .FailedToBindValues: std::move(EscapedValuesDuringBind)};
646 }
647
648 /// Attempt to extract the fields of \p LCV and bind them to the struct region
649 /// \p R.
650 ///
651 /// This path is used when it seems advantageous to "force" loading the values
652 /// within a LazyCompoundVal to bind memberwise to the struct region, rather
653 /// than using a Default binding at the base of the entire region. This is a
654 /// heuristic attempting to avoid building long chains of LazyCompoundVals.
655 ///
656 /// \returns The updated store bindings, or \c std::nullopt if binding
657 /// non-lazily would be too expensive.
658 std::optional<LimitedRegionBindingsRef>
659 tryBindSmallStruct(LimitedRegionBindingsConstRef B, const TypedValueRegion *R,
660 const RecordDecl *RD, nonloc::LazyCompoundVal LCV);
661
662 /// BindStruct - Bind a compound value to a structure.
663 LimitedRegionBindingsRef bindStruct(LimitedRegionBindingsConstRef B,
664 const TypedValueRegion *R, SVal V);
665
666 /// BindVector - Bind a compound value to a vector.
667 LimitedRegionBindingsRef bindVector(LimitedRegionBindingsConstRef B,
668 const TypedValueRegion *R, SVal V);
669
670 std::optional<LimitedRegionBindingsRef>
671 tryBindSmallArray(LimitedRegionBindingsConstRef B, const TypedValueRegion *R,
672 const ArrayType *AT, nonloc::LazyCompoundVal LCV);
673
674 LimitedRegionBindingsRef bindArray(LimitedRegionBindingsConstRef B,
675 const TypedValueRegion *R, SVal V);
676
677 /// Clears out all bindings in the given region and assigns a new value
678 /// as a Default binding.
679 LimitedRegionBindingsRef bindAggregate(LimitedRegionBindingsConstRef B,
680 const TypedRegion *R, SVal DefaultVal);
681
682 /// Create a new store with the specified binding removed.
683 /// \param ST the original store, that is the basis for the new store.
684 /// \param L the location whose binding should be removed.
685 StoreRef killBinding(Store ST, Loc L) override;
686
687 void incrementReferenceCount(Store store) override {
688 getRegionBindings(store).manualRetain();
689 }
690
691 /// If the StoreManager supports it, decrement the reference count of
692 /// the specified Store object. If the reference count hits 0, the memory
693 /// associated with the object is recycled.
694 void decrementReferenceCount(Store store) override {
695 getRegionBindings(store).manualRelease();
696 }
697
698 bool includedInBindings(Store store, const MemRegion *region) const override;
699
700 /// Return the value bound to specified location in a given state.
701 ///
702 /// The high level logic for this method is this:
703 /// getBinding (L)
704 /// if L has binding
705 /// return L's binding
706 /// else if L is in killset
707 /// return unknown
708 /// else
709 /// if L is on stack or heap
710 /// return undefined
711 /// else
712 /// return symbolic
713 SVal getBinding(Store S, Loc L, QualType T) override {
714 return getBinding(B: getRegionBindings(store: S), L, T);
715 }
716
717 std::optional<SVal> getDefaultBinding(Store S, const MemRegion *R) override {
718 RegionBindingsRef B = getRegionBindings(store: S);
719 // Default bindings are always applied over a base region so look up the
720 // base region's default binding, otherwise the lookup will fail when R
721 // is at an offset from R->getBaseRegion().
722 return B.getDefaultBinding(R: R->getBaseRegion());
723 }
724
725 SVal getBinding(RegionBindingsConstRef B, Loc L, QualType T = QualType());
726
727 SVal getBindingForElement(RegionBindingsConstRef B, const ElementRegion *R);
728
729 SVal getBindingForField(RegionBindingsConstRef B, const FieldRegion *R);
730
731 SVal getBindingForObjCIvar(RegionBindingsConstRef B, const ObjCIvarRegion *R);
732
733 SVal getBindingForVar(RegionBindingsConstRef B, const VarRegion *R);
734
735 SVal getBindingForLazySymbol(const TypedValueRegion *R);
736
737 SVal getBindingForFieldOrElementCommon(RegionBindingsConstRef B,
738 const TypedValueRegion *R,
739 QualType Ty);
740
741 SVal getLazyBinding(const SubRegion *LazyBindingRegion,
742 RegionBindingsRef LazyBinding);
743
744 /// Get bindings for the values in a struct and return a CompoundVal, used
745 /// when doing struct copy:
746 /// struct s x, y;
747 /// x = y;
748 /// y's value is retrieved by this method.
749 SVal getBindingForStruct(RegionBindingsConstRef B, const TypedValueRegion *R);
750 SVal getBindingForArray(RegionBindingsConstRef B, const TypedValueRegion *R);
751 NonLoc createLazyBinding(RegionBindingsConstRef B, const TypedValueRegion *R);
752
753 /// Used to lazily generate derived symbols for bindings that are defined
754 /// implicitly by default bindings in a super region.
755 ///
756 /// Note that callers may need to specially handle LazyCompoundVals, which
757 /// are returned as is in case the caller needs to treat them differently.
758 std::optional<SVal>
759 getBindingForDerivedDefaultValue(RegionBindingsConstRef B,
760 const MemRegion *superR,
761 const TypedValueRegion *R, QualType Ty);
762
763 /// Get the state and region whose binding this region \p R corresponds to.
764 ///
765 /// If there is no lazy binding for \p R, the returned value will have a null
766 /// \c second. Note that a null pointer can represents a valid Store.
767 std::pair<Store, const SubRegion *>
768 findLazyBinding(RegionBindingsConstRef B, const SubRegion *R,
769 const SubRegion *originalRegion);
770
771 /// Returns the cached set of interesting SVals contained within a lazy
772 /// binding.
773 ///
774 /// The precise value of "interesting" is determined for the purposes of
775 /// RegionStore's internal analysis. It must always contain all regions and
776 /// symbols, but may omit constants and other kinds of SVal.
777 ///
778 /// In contrast to compound values, LazyCompoundVals are also added
779 /// to the 'interesting values' list in addition to the child interesting
780 /// values.
781 const SValListTy &getInterestingValues(nonloc::LazyCompoundVal LCV);
782
783 //===------------------------------------------------------------------===//
784 // State pruning.
785 //===------------------------------------------------------------------===//
786
787 /// removeDeadBindings - Scans the RegionStore of 'state' for dead values.
788 /// It returns a new Store with these values removed.
789 StoreRef removeDeadBindings(Store store, const StackFrameContext *LCtx,
790 SymbolReaper& SymReaper) override;
791
792 //===------------------------------------------------------------------===//
793 // Utility methods.
794 //===------------------------------------------------------------------===//
795
796 RegionBindingsRef getRegionBindings(Store store) const {
797 llvm::PointerIntPair<Store, 1, bool> Ptr;
798 Ptr.setFromOpaqueValue(const_cast<void *>(store));
799 return {CBFactory,
800 static_cast<const RegionBindings::TreeTy *>(Ptr.getPointer()),
801 RBFactory.getTreeFactory(), Ptr.getInt()};
802 }
803
804 LimitedRegionBindingsRef
805 getRegionBindings(Store store,
806 SmallVectorImpl<SVal> &EscapedValuesDuringBind) const {
807 return LimitedRegionBindingsRef(
808 getRegionBindings(store), EscapedValuesDuringBind,
809 /*BindingsLeft=*/RegionStoreMaxBindingFanOutPlusOne);
810 }
811
812 void printJson(raw_ostream &Out, Store S, const char *NL = "\n",
813 unsigned int Space = 0, bool IsDot = false) const override;
814
815 void iterBindings(Store store, BindingsHandler& f) override {
816 RegionBindingsRef B = getRegionBindings(store);
817 for (const auto &[Region, Cluster] : B) {
818 for (const auto &[Key, Value] : Cluster) {
819 if (!Key.isDirect())
820 continue;
821 if (const SubRegion *R = dyn_cast<SubRegion>(Val: Key.getRegion())) {
822 // FIXME: Possibly incorporate the offset?
823 if (!f.HandleBinding(SMgr&: *this, store, region: R, val: Value))
824 return;
825 }
826 }
827 }
828 }
829};
830
831} // end anonymous namespace
832
833//===----------------------------------------------------------------------===//
834// RegionStore creation.
835//===----------------------------------------------------------------------===//
836
837std::unique_ptr<StoreManager>
838ento::CreateRegionStoreManager(ProgramStateManager &StMgr) {
839 return std::make_unique<RegionStoreManager>(args&: StMgr);
840}
841
842//===----------------------------------------------------------------------===//
843// Region Cluster analysis.
844//===----------------------------------------------------------------------===//
845
846namespace {
/// Used to determine which global regions are automatically included in the
/// initial worklist of a ClusterAnalysis.
///
/// Ordered from most restrictive to most inclusive; see
/// InvalidateRegionsWorker::isInitiallyIncludedGlobalRegion for how each value
/// maps onto a memory-space check.
enum GlobalsFilterKind {
  /// Don't include any global regions.
  GFK_None,
  /// Only include system globals.
  GFK_SystemOnly,
  /// Include all global regions.
  GFK_All
};
857
/// CRTP base class for analyses that traverse the store as a set of region
/// "clusters" (all bindings that share a base region).
///
/// The DERIVED class customizes behavior by shadowing VisitAddedToCluster,
/// VisitCluster, AddToWorkList and includeEntireMemorySpace; dispatch is
/// static, via static_cast<DERIVED*>(this).
template <typename DERIVED>
class ClusterAnalysis {
protected:
  typedef llvm::DenseMap<const MemRegion *, const ClusterBindings *> ClusterMap;
  typedef const MemRegion * WorkListElement;
  typedef SmallVector<WorkListElement, 10> WorkList;

  // Clusters already enqueued on the worklist; guards against re-visiting.
  llvm::SmallPtrSet<const ClusterBindings *, 16> Visited;

  WorkList WL;

  RegionStoreManager &RM;
  ASTContext &Ctx;
  SValBuilder &svalBuilder;

  // The bindings being analyzed (and, in mutating subclasses, updated).
  RegionBindingsRef B;


protected:
  /// Returns the cluster of base region \p R, or null if it has no bindings.
  const ClusterBindings *getCluster(const MemRegion *R) {
    return B.lookup(K: R);
  }

  /// Returns true if all clusters in the given memspace should be initially
  /// included in the cluster analysis. Subclasses may provide their
  /// own implementation.
  bool includeEntireMemorySpace(const MemRegion *Base) {
    return false;
  }

public:
  ClusterAnalysis(RegionStoreManager &rm, ProgramStateManager &StateMgr,
                  RegionBindingsRef b)
      : RM(rm), Ctx(StateMgr.getContext()),
        svalBuilder(StateMgr.getSValBuilder()), B(std::move(b)) {}

  RegionBindingsRef getRegionBindings() const { return B; }

  /// Returns true if \p R's cluster has already been scheduled for a visit.
  bool isVisited(const MemRegion *R) {
    return Visited.count(Ptr: getCluster(R));
  }

  /// Notify the subclass of every existing cluster and seed the worklist with
  /// those whose memory space is to be processed in its entirety.
  void GenerateClusters() {
    // Scan the entire set of bindings and record the region clusters.
    for (RegionBindingsRef::iterator RI = B.begin(), RE = B.end();
         RI != RE; ++RI){
      const MemRegion *Base = RI.getKey();

      const ClusterBindings &Cluster = RI.getData();
      assert(!Cluster.isEmpty() && "Empty clusters should be removed");
      static_cast<DERIVED*>(this)->VisitAddedToCluster(Base, Cluster);

      // If the base's memspace should be entirely invalidated, add the cluster
      // to the workspace up front.
      if (static_cast<DERIVED*>(this)->includeEntireMemorySpace(Base))
        AddToWorkList(WorkListElement(Base), &Cluster);
    }
  }

  /// Enqueue \p E; returns false (adding nothing) if cluster \p C was already
  /// scheduled.
  bool AddToWorkList(WorkListElement E, const ClusterBindings *C) {
    if (C && !Visited.insert(Ptr: C).second)
      return false;
    WL.push_back(Elt: E);
    return true;
  }

  bool AddToWorkList(const MemRegion *R) {
    return static_cast<DERIVED*>(this)->AddToWorkList(R);
  }

  /// Drain the worklist, dispatching each element to DERIVED::VisitCluster.
  void RunWorkList() {
    while (!WL.empty()) {
      WorkListElement E = WL.pop_back_val();
      const MemRegion *BaseR = E;

      static_cast<DERIVED*>(this)->VisitCluster(BaseR, getCluster(R: BaseR));
    }
  }

  // Default no-op visitor hooks; subclasses shadow these as needed.
  void VisitAddedToCluster(const MemRegion *baseR, const ClusterBindings &C) {}
  void VisitCluster(const MemRegion *baseR, const ClusterBindings *C) {}

  void VisitCluster(const MemRegion *BaseR, const ClusterBindings *C,
                    bool Flag) {
    static_cast<DERIVED*>(this)->VisitCluster(BaseR, C);
  }
};
945}
946
947//===----------------------------------------------------------------------===//
948// Binding invalidation.
949//===----------------------------------------------------------------------===//
950
951bool RegionStoreManager::scanReachableSymbols(Store S, const MemRegion *R,
952 ScanReachableSymbols &Callbacks) {
953 assert(R == R->getBaseRegion() && "Should only be called for base regions");
954 RegionBindingsRef B = getRegionBindings(store: S);
955 const ClusterBindings *Cluster = B.lookup(K: R);
956
957 if (!Cluster)
958 return true;
959
960 for (ClusterBindings::iterator RI = Cluster->begin(), RE = Cluster->end();
961 RI != RE; ++RI) {
962 if (!Callbacks.scan(val: RI.getData()))
963 return false;
964 }
965
966 return true;
967}
968
969static inline bool isUnionField(const FieldRegion *FR) {
970 return FR->getDecl()->getParent()->isUnion();
971}
972
973typedef SmallVector<const FieldDecl *, 8> FieldVector;
974
975static void getSymbolicOffsetFields(BindingKey K, FieldVector &Fields) {
976 assert(K.hasSymbolicOffset() && "Not implemented for concrete offset keys");
977
978 const MemRegion *Base = K.getConcreteOffsetRegion();
979 const MemRegion *R = K.getRegion();
980
981 while (R != Base) {
982 if (const FieldRegion *FR = dyn_cast<FieldRegion>(Val: R))
983 if (!isUnionField(FR))
984 Fields.push_back(Elt: FR->getDecl());
985
986 R = cast<SubRegion>(Val: R)->getSuperRegion();
987 }
988}
989
990static bool isCompatibleWithFields(BindingKey K, const FieldVector &Fields) {
991 assert(K.hasSymbolicOffset() && "Not implemented for concrete offset keys");
992
993 if (Fields.empty())
994 return true;
995
996 FieldVector FieldsInBindingKey;
997 getSymbolicOffsetFields(K, Fields&: FieldsInBindingKey);
998
999 ptrdiff_t Delta = FieldsInBindingKey.size() - Fields.size();
1000 if (Delta >= 0)
1001 return std::equal(first1: FieldsInBindingKey.begin() + Delta,
1002 last1: FieldsInBindingKey.end(),
1003 first2: Fields.begin());
1004 else
1005 return std::equal(first1: FieldsInBindingKey.begin(), last1: FieldsInBindingKey.end(),
1006 first2: Fields.begin() - Delta);
1007}
1008
/// Collects all bindings in \p Cluster that may refer to bindings within
/// \p Top.
///
/// Each binding is a pair whose \c first is the key (a BindingKey) and whose
/// \c second is the value (an SVal).
///
/// The \p IncludeAllDefaultBindings parameter specifies whether to include
/// default bindings that may extend beyond \p Top itself, e.g. if \p Top is
/// an aggregate within a larger aggregate with a default binding.
static void
collectSubRegionBindings(SmallVectorImpl<BindingPair> &Bindings,
                         SValBuilder &SVB, const ClusterBindings &Cluster,
                         const SubRegion *Top, BindingKey TopKey,
                         bool IncludeAllDefaultBindings) {
  // For a symbolic-offset key, fall back to the concrete-offset base region
  // and remember the intervening field chain so compatibility can be checked
  // for the symbolic cases below.
  FieldVector FieldsInSymbolicSubregions;
  if (TopKey.hasSymbolicOffset()) {
    getSymbolicOffsetFields(K: TopKey, Fields&: FieldsInSymbolicSubregions);
    Top = TopKey.getConcreteOffsetRegion();
    TopKey = BindingKey::Make(R: Top, k: BindingKey::Default);
  }

  // Find the length (in bits) of the region being invalidated.
  uint64_t Length = UINT64_MAX;
  SVal Extent = Top->getMemRegionManager().getStaticSize(MR: Top, SVB);
  if (std::optional<nonloc::ConcreteInt> ExtentCI =
          Extent.getAs<nonloc::ConcreteInt>()) {
    const llvm::APSInt &ExtentInt = ExtentCI->getValue();
    assert(ExtentInt.isNonNegative() || ExtentInt.isUnsigned());
    // Extents are in bytes but region offsets are in bits. Be careful!
    Length = ExtentInt.getLimitedValue() * SVB.getContext().getCharWidth();
  } else if (const FieldRegion *FR = dyn_cast<FieldRegion>(Val: Top)) {
    // Bit-fields have no byte extent; use the declared bit width instead.
    if (FR->getDecl()->isBitField())
      Length = FR->getDecl()->getBitWidthValue();
  }

  for (const auto &StoreEntry : Cluster) {
    BindingKey NextKey = StoreEntry.first;
    if (NextKey.getRegion() == TopKey.getRegion()) {
      // FIXME: This doesn't catch the case where we're really invalidating a
      // region with a symbolic offset. Example:
      // R: points[i].y
      // Next: points[0].x

      if (NextKey.getOffset() > TopKey.getOffset() &&
          NextKey.getOffset() - TopKey.getOffset() < Length) {
        // Case 1: The next binding is inside the region we're invalidating.
        // Include it.
        Bindings.push_back(Elt: StoreEntry);

      } else if (NextKey.getOffset() == TopKey.getOffset()) {
        // Case 2: The next binding is at the same offset as the region we're
        // invalidating. In this case, we need to leave default bindings alone,
        // since they may be providing a default value for a regions beyond what
        // we're invalidating.
        // FIXME: This is probably incorrect; consider invalidating an outer
        // struct whose first field is bound to a LazyCompoundVal.
        if (IncludeAllDefaultBindings || NextKey.isDirect())
          Bindings.push_back(Elt: StoreEntry);
      }

    } else if (NextKey.hasSymbolicOffset()) {
      const MemRegion *Base = NextKey.getConcreteOffsetRegion();
      if (Top->isSubRegionOf(R: Base) && Top != Base) {
        // Case 3: The next key is symbolic and we just changed something within
        // its concrete region. We don't know if the binding is still valid, so
        // we'll be conservative and include it.
        if (IncludeAllDefaultBindings || NextKey.isDirect())
          if (isCompatibleWithFields(K: NextKey, Fields: FieldsInSymbolicSubregions))
            Bindings.push_back(Elt: StoreEntry);
      } else if (const SubRegion *BaseSR = dyn_cast<SubRegion>(Val: Base)) {
        // Case 4: The next key is symbolic, but we changed a known
        // super-region. In this case the binding is certainly included.
        if (BaseSR->isSubRegionOf(R: Top))
          if (isCompatibleWithFields(K: NextKey, Fields: FieldsInSymbolicSubregions))
            Bindings.push_back(Elt: StoreEntry);
      }
    }
  }
}
1088
1089static void
1090collectSubRegionBindings(SmallVectorImpl<BindingPair> &Bindings,
1091 SValBuilder &SVB, const ClusterBindings &Cluster,
1092 const SubRegion *Top, bool IncludeAllDefaultBindings) {
1093 collectSubRegionBindings(Bindings, SVB, Cluster, Top,
1094 TopKey: BindingKey::Make(R: Top, k: BindingKey::Default),
1095 IncludeAllDefaultBindings);
1096}
1097
/// Removes from \p B every binding that lies within \p Top, i.e. the bindings
/// a write to \p Top would overwrite, returning the updated bindings.
LimitedRegionBindingsRef
RegionStoreManager::removeSubRegionBindings(LimitedRegionBindingsConstRef B,
                                            const SubRegion *Top) {
  BindingKey TopKey = BindingKey::Make(R: Top, k: BindingKey::Default);
  const MemRegion *ClusterHead = TopKey.getBaseRegion();

  if (Top == ClusterHead) {
    // We can remove an entire cluster's bindings all in one go.
    return B.removeCluster(BaseRegion: Top);
  }

  const ClusterBindings *Cluster = B.lookup(K: ClusterHead);
  if (!Cluster) {
    // If we're invalidating a region with a symbolic offset, we need to make
    // sure we don't treat the base region as uninitialized anymore.
    if (TopKey.hasSymbolicOffset()) {
      const SubRegion *Concrete = TopKey.getConcreteOffsetRegion();
      return B.addBinding(R: Concrete, k: BindingKey::Default, V: UnknownVal());
    }
    return B;
  }

  // Gather the keys overlapping Top and drop each one from the cluster.
  SmallVector<BindingPair, 32> Bindings;
  collectSubRegionBindings(Bindings, SVB&: svalBuilder, Cluster: *Cluster, Top, TopKey,
                           /*IncludeAllDefaultBindings=*/false);

  ClusterBindingsRef Result(*Cluster, CBFactory);
  for (BindingKey Key : llvm::make_first_range(c&: Bindings))
    Result = Result.remove(K: Key);

  // If we're invalidating a region with a symbolic offset, we need to make sure
  // we don't treat the base region as uninitialized anymore.
  // FIXME: This isn't very precise; see the example in
  // collectSubRegionBindings.
  if (TopKey.hasSymbolicOffset()) {
    const SubRegion *Concrete = TopKey.getConcreteOffsetRegion();
    Result = Result.add(K: BindingKey::Make(R: Concrete, k: BindingKey::Default),
                        D: UnknownVal());
  }

  // Drop the cluster entirely if nothing remains in it.
  if (Result.isEmpty())
    return B.removeCluster(BaseRegion: ClusterHead);
  return B.addWithoutDecreasingLimit(BaseRegion: ClusterHead, BindingKeyAndValue: Result.asImmutableMap());
}
1142
namespace {
/// Cluster worker that performs region invalidation: values reachable from
/// the worklist are visited, stale bindings are removed, and fresh conjured
/// symbols are bound in their place.
class InvalidateRegionsWorker : public ClusterAnalysis<InvalidateRegionsWorker>
{
  // CFG element passed to conjureSymbolVal when creating fresh symbols.
  ConstCFGElementRef Elem;
  // Count passed to conjureSymbolVal to distinguish repeated visits.
  unsigned Count;
  const LocationContext *LCtx;
  // Out-parameter: symbols touched by this invalidation.
  InvalidatedSymbols &IS;
  RegionAndSymbolInvalidationTraits &ITraits;
  // Out-parameter (may be null): regions whose contents were invalidated.
  StoreManager::InvalidatedRegions *Regions;
  // Selects which global regions are seeded onto the worklist up front.
  GlobalsFilterKind GlobalsFilter;
public:
  InvalidateRegionsWorker(RegionStoreManager &rm, ProgramStateManager &stateMgr,
                          RegionBindingsRef b, ConstCFGElementRef elem,
                          unsigned count, const LocationContext *lctx,
                          InvalidatedSymbols &is,
                          RegionAndSymbolInvalidationTraits &ITraitsIn,
                          StoreManager::InvalidatedRegions *r,
                          GlobalsFilterKind GFK)
      : ClusterAnalysis<InvalidateRegionsWorker>(rm, stateMgr, b), Elem(elem),
        Count(count), LCtx(lctx), IS(is), ITraits(ITraitsIn), Regions(r),
        GlobalsFilter(GFK) {}

  void VisitCluster(const MemRegion *baseR, const ClusterBindings *C);
  void VisitBinding(SVal V);

  using ClusterAnalysis::AddToWorkList;

  bool AddToWorkList(const MemRegion *R);

  /// Returns true if all clusters in the memory space for \p Base should be
  /// be invalidated.
  bool includeEntireMemorySpace(const MemRegion *Base);

  /// Returns true if the memory space of the given region is one of the global
  /// regions specially included at the start of invalidation.
  bool isInitiallyIncludedGlobalRegion(const MemRegion *R);
};
}
1181
1182bool InvalidateRegionsWorker::AddToWorkList(const MemRegion *R) {
1183 bool doNotInvalidateSuperRegion = ITraits.hasTrait(
1184 MR: R, IK: RegionAndSymbolInvalidationTraits::TK_DoNotInvalidateSuperRegion);
1185 const MemRegion *BaseR = doNotInvalidateSuperRegion ? R : R->getBaseRegion();
1186 return AddToWorkList(E: WorkListElement(BaseR), C: getCluster(R: BaseR));
1187}
1188
1189void InvalidateRegionsWorker::VisitBinding(SVal V) {
1190 // A symbol? Mark it touched by the invalidation.
1191 if (SymbolRef Sym = V.getAsSymbol())
1192 IS.insert(V: Sym);
1193
1194 if (const MemRegion *R = V.getAsRegion()) {
1195 AddToWorkList(R);
1196 return;
1197 }
1198
1199 // Is it a LazyCompoundVal? All references get invalidated as well.
1200 if (std::optional<nonloc::LazyCompoundVal> LCS =
1201 V.getAs<nonloc::LazyCompoundVal>()) {
1202
1203 // `getInterestingValues()` returns SVals contained within LazyCompoundVals,
1204 // so there is no need to visit them.
1205 for (SVal V : RM.getInterestingValues(LCV: *LCS))
1206 if (!isa<nonloc::LazyCompoundVal>(Val: V))
1207 VisitBinding(V);
1208
1209 return;
1210 }
1211}
1212
/// Invalidates one cluster rooted at \p baseR: visits every value the cluster
/// binds (so reachable symbols/regions are flagged), removes the old bindings
/// unless contents are preserved, and binds a conjured symbol chosen per the
/// region's kind and value type.
void InvalidateRegionsWorker::VisitCluster(const MemRegion *baseR,
                                           const ClusterBindings *C) {

  bool PreserveRegionsContents =
      ITraits.hasTrait(MR: baseR,
                       IK: RegionAndSymbolInvalidationTraits::TK_PreserveContents);

  if (C) {
    // Bound values are visited even when contents are preserved, so that
    // symbols and regions reachable from this cluster are still recorded.
    for (SVal Val : llvm::make_second_range(c: *C))
      VisitBinding(V: Val);

    // Invalidate regions contents.
    if (!PreserveRegionsContents)
      B = B.removeCluster(BaseRegion: baseR);
  }

  if (const auto *TO = dyn_cast<TypedValueRegion>(Val: baseR)) {
    if (const auto *RD = TO->getValueType()->getAsCXXRecordDecl()) {

      // Lambdas can affect all static local variables without explicitly
      // capturing those.
      // We invalidate all static locals referenced inside the lambda body.
      if (RD->isLambda() && RD->getLambdaCallOperator()->getBody()) {
        using namespace ast_matchers;

        const char *DeclBind = "DeclBind";
        StatementMatcher RefToStatic = stmt(hasDescendant(declRefExpr(
            to(InnerMatcher: varDecl(hasStaticStorageDuration()).bind(ID: DeclBind)))));
        auto Matches =
            match(Matcher: RefToStatic, Node: *RD->getLambdaCallOperator()->getBody(),
                  Context&: RD->getASTContext());

        for (BoundNodes &Match : Matches) {
          auto *VD = Match.getNodeAs<VarDecl>(ID: DeclBind);
          const VarRegion *ToInvalidate =
              RM.getRegionManager().getVarRegion(VD, LC: LCtx);
          AddToWorkList(R: ToInvalidate);
        }
      }
    }
  }

  // BlockDataRegion? If so, invalidate captured variables that are passed
  // by reference.
  if (const BlockDataRegion *BR = dyn_cast<BlockDataRegion>(Val: baseR)) {
    for (auto Var : BR->referenced_vars()) {
      const VarRegion *VR = Var.getCapturedRegion();
      const VarDecl *VD = VR->getDecl();
      if (VD->hasAttr<BlocksAttr>() || !VD->hasLocalStorage()) {
        AddToWorkList(R: VR);
      }
      else if (Loc::isLocType(T: VR->getValueType())) {
        // Map the current bindings to a Store to retrieve the value
        // of the binding. If that binding itself is a region, we should
        // invalidate that region. This is because a block may capture
        // a pointer value, but the thing pointed by that pointer may
        // get invalidated.
        SVal V = RM.getBinding(B, L: loc::MemRegionVal(VR));
        if (std::optional<Loc> L = V.getAs<Loc>()) {
          if (const MemRegion *LR = L->getAsRegion())
            AddToWorkList(R: LR);
        }
      }
    }
    return;
  }

  // Symbolic region? Mark the underlying symbol as touched.
  if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(Val: baseR))
    IS.insert(V: SR->getSymbol());

  // Nothing else should be done in the case when we preserve regions context.
  if (PreserveRegionsContents)
    return;

  // Otherwise, we have a normal data region. Record that we touched the region.
  if (Regions)
    Regions->push_back(Elt: baseR);

  if (isa<AllocaRegion, SymbolicRegion>(Val: baseR)) {
    // Invalidate the region by setting its default value to
    // conjured symbol. The type of the symbol is irrelevant.
    DefinedOrUnknownSVal V =
        svalBuilder.conjureSymbolVal(symbolTag: baseR, elem: Elem, LCtx, type: Ctx.IntTy, count: Count);
    B = B.addBinding(R: baseR, k: BindingKey::Default, V);
    return;
  }

  if (!baseR->isBoundable())
    return;

  const TypedValueRegion *TR = cast<TypedValueRegion>(Val: baseR);
  QualType T = TR->getValueType();

  if (isInitiallyIncludedGlobalRegion(R: baseR)) {
    // If the region is a global and we are invalidating all globals,
    // erasing the entry is good enough. This causes all globals to be lazily
    // symbolicated from the same base symbol.
    return;
  }

  if (T->isRecordType()) {
    // Invalidate the region by setting its default value to
    // conjured symbol. The type of the symbol is irrelevant.
    DefinedOrUnknownSVal V =
        svalBuilder.conjureSymbolVal(symbolTag: baseR, elem: Elem, LCtx, type: Ctx.IntTy, count: Count);
    B = B.addBinding(R: baseR, k: BindingKey::Default, V);
    return;
  }

  if (const ArrayType *AT = Ctx.getAsArrayType(T)) {
    bool doNotInvalidateSuperRegion = ITraits.hasTrait(
        MR: baseR,
        IK: RegionAndSymbolInvalidationTraits::TK_DoNotInvalidateSuperRegion);

    if (doNotInvalidateSuperRegion) {
      // We are not doing blank invalidation of the whole array region so we
      // have to manually invalidate each elements.
      std::optional<uint64_t> NumElements;

      // Compute lower and upper offsets for region within array.
      if (const ConstantArrayType *CAT = dyn_cast<ConstantArrayType>(Val: AT))
        NumElements = CAT->getZExtSize();
      if (!NumElements) // We are not dealing with a constant size array
        goto conjure_default;
      QualType ElementTy = AT->getElementType();
      uint64_t ElemSize = Ctx.getTypeSize(T: ElementTy);
      const RegionOffset &RO = baseR->getAsOffset();
      const MemRegion *SuperR = baseR->getBaseRegion();
      if (RO.hasSymbolicOffset()) {
        // If base region has a symbolic offset,
        // we revert to invalidating the super region.
        if (SuperR)
          AddToWorkList(R: SuperR);
        goto conjure_default;
      }

      uint64_t LowerOffset = RO.getOffset();
      uint64_t UpperOffset = LowerOffset + *NumElements * ElemSize;
      bool UpperOverflow = UpperOffset < LowerOffset;

      // Invalidate regions which are within array boundaries,
      // or have a symbolic offset.
      if (!SuperR)
        goto conjure_default;

      const ClusterBindings *C = B.lookup(K: SuperR);
      if (!C)
        goto conjure_default;

      for (const auto &[BK, V] : *C) {
        std::optional<uint64_t> ROffset =
            BK.hasSymbolicOffset() ? std::optional<uint64_t>() : BK.getOffset();

        // Check offset is not symbolic and within array's boundaries.
        // Handles arrays of 0 elements and of 0-sized elements as well.
        if (!ROffset ||
            ((*ROffset >= LowerOffset && *ROffset < UpperOffset) ||
             (UpperOverflow &&
              (*ROffset >= LowerOffset || *ROffset < UpperOffset)) ||
             (LowerOffset == UpperOffset && *ROffset == LowerOffset))) {
          B = B.removeBinding(K: BK);
          // Bound symbolic regions need to be invalidated for dead symbol
          // detection.
          const MemRegion *R = V.getAsRegion();
          if (isa_and_nonnull<SymbolicRegion>(Val: R))
            VisitBinding(V);
        }
      }
    }
  conjure_default:
    // Set the default value of the array to conjured symbol.
    DefinedOrUnknownSVal V = svalBuilder.conjureSymbolVal(
        symbolTag: baseR, elem: Elem, LCtx, type: AT->getElementType(), count: Count);
    B = B.addBinding(R: baseR, k: BindingKey::Default, V);
    return;
  }

  // Any other boundable typed region: bind a conjured symbol directly.
  DefinedOrUnknownSVal V =
      svalBuilder.conjureSymbolVal(symbolTag: baseR, elem: Elem, LCtx, type: T, count: Count);
  assert(SymbolManager::canSymbolicate(T) || V.isUnknown());
  B = B.addBinding(R: baseR, k: BindingKey::Direct, V);
}
1396
1397bool InvalidateRegionsWorker::isInitiallyIncludedGlobalRegion(
1398 const MemRegion *R) {
1399 switch (GlobalsFilter) {
1400 case GFK_None:
1401 return false;
1402 case GFK_SystemOnly:
1403 return isa<GlobalSystemSpaceRegion>(Val: R->getRawMemorySpace());
1404 case GFK_All:
1405 return isa<NonStaticGlobalSpaceRegion>(Val: R->getRawMemorySpace());
1406 }
1407
1408 llvm_unreachable("unknown globals filter");
1409}
1410
1411bool InvalidateRegionsWorker::includeEntireMemorySpace(const MemRegion *Base) {
1412 if (isInitiallyIncludedGlobalRegion(R: Base))
1413 return true;
1414
1415 const MemSpaceRegion *MemSpace = Base->getRawMemorySpace();
1416 return ITraits.hasTrait(MR: MemSpace,
1417 IK: RegionAndSymbolInvalidationTraits::TK_EntireMemSpace);
1418}
1419
1420RegionBindingsRef RegionStoreManager::invalidateGlobalRegion(
1421 MemRegion::Kind K, ConstCFGElementRef Elem, unsigned Count,
1422 const LocationContext *LCtx, RegionBindingsRef B,
1423 InvalidatedRegions *Invalidated) {
1424 // Bind the globals memory space to a new symbol that we will use to derive
1425 // the bindings for all globals.
1426 const GlobalsSpaceRegion *GS = MRMgr.getGlobalsRegion(K);
1427 SVal V = svalBuilder.conjureSymbolVal(
1428 /* symbolTag = */ (const void *)GS, elem: Elem, LCtx,
1429 /* type does not matter */ type: Ctx.IntTy, count: Count);
1430
1431 B = B.removeBinding(R: GS)
1432 .addBinding(K: BindingKey::Make(R: GS, k: BindingKey::Default), V);
1433
1434 // Even if there are no bindings in the global scope, we still need to
1435 // record that we touched it.
1436 if (Invalidated)
1437 Invalidated->push_back(Elt: GS);
1438
1439 return B;
1440}
1441
1442void RegionStoreManager::populateWorkList(InvalidateRegionsWorker &W,
1443 ArrayRef<SVal> Values,
1444 InvalidatedRegions *TopLevelRegions) {
1445 for (SVal V : Values) {
1446 if (auto LCS = V.getAs<nonloc::LazyCompoundVal>()) {
1447 for (SVal S : getInterestingValues(LCV: *LCS))
1448 if (const MemRegion *R = S.getAsRegion())
1449 W.AddToWorkList(R);
1450
1451 continue;
1452 }
1453
1454 if (const MemRegion *R = V.getAsRegion()) {
1455 if (TopLevelRegions)
1456 TopLevelRegions->push_back(Elt: R);
1457 W.AddToWorkList(R);
1458 continue;
1459 }
1460 }
1461}
1462
1463StoreRef RegionStoreManager::invalidateRegions(
1464 Store store, ArrayRef<SVal> Values, ConstCFGElementRef Elem, unsigned Count,
1465 const LocationContext *LCtx, const CallEvent *Call, InvalidatedSymbols &IS,
1466 RegionAndSymbolInvalidationTraits &ITraits,
1467 InvalidatedRegions *TopLevelRegions, InvalidatedRegions *Invalidated) {
1468 GlobalsFilterKind GlobalsFilter;
1469 if (Call) {
1470 if (Call->isInSystemHeader())
1471 GlobalsFilter = GFK_SystemOnly;
1472 else
1473 GlobalsFilter = GFK_All;
1474 } else {
1475 GlobalsFilter = GFK_None;
1476 }
1477
1478 RegionBindingsRef B = getRegionBindings(store);
1479 InvalidateRegionsWorker W(*this, StateMgr, B, Elem, Count, LCtx, IS, ITraits,
1480 Invalidated, GlobalsFilter);
1481
1482 // Scan the bindings and generate the clusters.
1483 W.GenerateClusters();
1484
1485 // Add the regions to the worklist.
1486 populateWorkList(W, Values, TopLevelRegions);
1487
1488 W.RunWorkList();
1489
1490 // Return the new bindings.
1491 B = W.getRegionBindings();
1492
1493 // For calls, determine which global regions should be invalidated and
1494 // invalidate them. (Note that function-static and immutable globals are never
1495 // invalidated by this.)
1496 // TODO: This could possibly be more precise with modules.
1497 switch (GlobalsFilter) {
1498 case GFK_All:
1499 B = invalidateGlobalRegion(K: MemRegion::GlobalInternalSpaceRegionKind, Elem,
1500 Count, LCtx, B, Invalidated);
1501 [[fallthrough]];
1502 case GFK_SystemOnly:
1503 B = invalidateGlobalRegion(K: MemRegion::GlobalSystemSpaceRegionKind, Elem,
1504 Count, LCtx, B, Invalidated);
1505 [[fallthrough]];
1506 case GFK_None:
1507 break;
1508 }
1509
1510 return StoreRef(B.asStore(), *this);
1511}
1512
1513//===----------------------------------------------------------------------===//
1514// Location and region casting.
1515//===----------------------------------------------------------------------===//
1516
1517/// ArrayToPointer - Emulates the "decay" of an array to a pointer
1518/// type. 'Array' represents the lvalue of the array being decayed
1519/// to a pointer, and the returned SVal represents the decayed
1520/// version of that lvalue (i.e., a pointer to the first element of
1521/// the array). This is called by ExprEngine when evaluating casts
1522/// from arrays to pointers.
1523SVal RegionStoreManager::ArrayToPointer(Loc Array, QualType T) {
1524 if (isa<loc::ConcreteInt>(Val: Array))
1525 return Array;
1526
1527 if (!isa<loc::MemRegionVal>(Val: Array))
1528 return UnknownVal();
1529
1530 const SubRegion *R =
1531 cast<SubRegion>(Val: Array.castAs<loc::MemRegionVal>().getRegion());
1532 NonLoc ZeroIdx = svalBuilder.makeZeroArrayIndex();
1533 return loc::MemRegionVal(MRMgr.getElementRegion(elementType: T, Idx: ZeroIdx, superRegion: R, Ctx));
1534}
1535
1536//===----------------------------------------------------------------------===//
1537// Loading values from regions.
1538//===----------------------------------------------------------------------===//
1539
/// Loads the value at location \p L from the bindings \p B.
///
/// \param T the expected type of the loaded value; if null it is auto-detected
///        from the region. The load dispatches on the region's value type:
///        structs, unions, arrays, vectors, fields, elements, ObjC ivars and
///        variables each take a specialized path below.
SVal RegionStoreManager::getBinding(RegionBindingsConstRef B, Loc L, QualType T) {
  assert(!isa<UnknownVal>(L) && "location unknown");
  assert(!isa<UndefinedVal>(L) && "location undefined");

  // For access to concrete addresses, return UnknownVal. Checks
  // for null dereferences (and similar errors) are done by checkers, not
  // the Store.
  // FIXME: We can consider lazily symbolicating such memory, but we really
  // should defer this when we can reason easily about symbolicating arrays
  // of bytes.
  if (L.getAs<loc::ConcreteInt>()) {
    return UnknownVal();
  }
  if (!L.getAs<loc::MemRegionVal>()) {
    return UnknownVal();
  }

  const MemRegion *MR = L.castAs<loc::MemRegionVal>().getRegion();

  if (isa<BlockDataRegion>(Val: MR)) {
    return UnknownVal();
  }

  // Auto-detect the binding type.
  if (T.isNull()) {
    if (const auto *TVR = dyn_cast<TypedValueRegion>(Val: MR))
      T = TVR->getValueType();
    else if (const auto *TR = dyn_cast<TypedRegion>(Val: MR))
      T = TR->getLocationType()->getPointeeType();
    else if (const auto *SR = dyn_cast<SymbolicRegion>(Val: MR))
      T = SR->getPointeeStaticType();
  }
  assert(!T.isNull() && "Unable to auto-detect binding type!");
  assert(!T->isVoidType() && "Attempting to dereference a void pointer!");

  // An untyped region is read through its zero'th element of type T.
  if (!isa<TypedValueRegion>(Val: MR))
    MR = GetElementZeroRegion(R: cast<SubRegion>(Val: MR), T);

  // FIXME: Perhaps this method should just take a 'const MemRegion*' argument
  // instead of 'Loc', and have the other Loc cases handled at a higher level.
  const TypedValueRegion *R = cast<TypedValueRegion>(Val: MR);
  QualType RTy = R->getValueType();

  // FIXME: we do not yet model the parts of a complex type, so treat the
  // whole thing as "unknown".
  if (RTy->isAnyComplexType())
    return UnknownVal();

  // FIXME: We should eventually handle funny addressing. e.g.:
  //
  // int x = ...;
  // int *p = &x;
  // char *q = (char*) p;
  // char c = *q; // returns the first byte of 'x'.
  //
  // Such funny addressing will occur due to layering of regions.
  if (RTy->isStructureOrClassType())
    return getBindingForStruct(B, R);

  // FIXME: Handle unions.
  if (RTy->isUnionType())
    return createLazyBinding(B, R);

  if (RTy->isArrayType()) {
    if (RTy->isConstantArrayType())
      return getBindingForArray(B, R);
    else
      return UnknownVal();
  }

  // FIXME: handle Vector types.
  if (RTy->isVectorType())
    return UnknownVal();

  if (const FieldRegion* FR = dyn_cast<FieldRegion>(Val: R))
    return svalBuilder.evalCast(V: getBindingForField(B, R: FR), CastTy: T, OriginalTy: QualType{});

  if (const ElementRegion* ER = dyn_cast<ElementRegion>(Val: R)) {
    // FIXME: Here we actually perform an implicit conversion from the loaded
    // value to the element type. Eventually we want to compose these values
    // more intelligently. For example, an 'element' can encompass multiple
    // bound regions (e.g., several bound bytes), or could be a subset of
    // a larger value.
    return svalBuilder.evalCast(V: getBindingForElement(B, R: ER), CastTy: T, OriginalTy: QualType{});
  }

  if (const ObjCIvarRegion *IVR = dyn_cast<ObjCIvarRegion>(Val: R)) {
    // FIXME: Here we actually perform an implicit conversion from the loaded
    // value to the ivar type. What we should model is stores to ivars
    // that blow past the extent of the ivar. If the address of the ivar is
    // reinterpretted, it is possible we stored a different value that could
    // fit within the ivar. Either we need to cast these when storing them
    // or reinterpret them lazily (as we do here).
    return svalBuilder.evalCast(V: getBindingForObjCIvar(B, R: IVR), CastTy: T, OriginalTy: QualType{});
  }

  if (const VarRegion *VR = dyn_cast<VarRegion>(Val: R)) {
    // FIXME: Here we actually perform an implicit conversion from the loaded
    // value to the variable type. What we should model is stores to variables
    // that blow past the extent of the variable. If the address of the
    // variable is reinterpretted, it is possible we stored a different value
    // that could fit within the variable. Either we need to cast these when
    // storing them or reinterpret them lazily (as we do here).
    return svalBuilder.evalCast(V: getBindingForVar(B, R: VR), CastTy: T, OriginalTy: QualType{});
  }

  const SVal *V = B.lookup(R, k: BindingKey::Direct);

  // Check if the region has a binding.
  if (V)
    return *V;

  // The location does not have a bound value. This means that it has
  // the value it had upon its creation and/or entry to the analyzed
  // function/method. These are either symbolic values or 'undefined'.
  if (isa<StackLocalsSpaceRegion>(Val: R->getRawMemorySpace())) {
    // All stack variables are considered to have undefined values
    // upon creation. All heap allocated blocks are considered to
    // have undefined values as well unless they are explicitly bound
    // to specific values.
    return UndefinedVal();
  }

  // All other values are symbolic.
  return svalBuilder.getRegionValueSymbolVal(region: R);
}
1666
1667static QualType getUnderlyingType(const SubRegion *R) {
1668 QualType RegionTy;
1669 if (const TypedValueRegion *TVR = dyn_cast<TypedValueRegion>(Val: R))
1670 RegionTy = TVR->getValueType();
1671
1672 if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(Val: R))
1673 RegionTy = SR->getSymbol()->getType();
1674
1675 return RegionTy;
1676}
1677
1678/// Checks to see if store \p B has a lazy binding for region \p R.
1679///
1680/// If \p AllowSubregionBindings is \c false, a lazy binding will be rejected
1681/// if there are additional bindings within \p R.
1682///
1683/// Note that unlike RegionStoreManager::findLazyBinding, this will not search
1684/// for lazy bindings for super-regions of \p R.
1685static std::optional<nonloc::LazyCompoundVal>
1686getExistingLazyBinding(SValBuilder &SVB, RegionBindingsConstRef B,
1687 const SubRegion *R, bool AllowSubregionBindings) {
1688 std::optional<SVal> V = B.getDefaultBinding(R);
1689 if (!V)
1690 return std::nullopt;
1691
1692 std::optional<nonloc::LazyCompoundVal> LCV =
1693 V->getAs<nonloc::LazyCompoundVal>();
1694 if (!LCV)
1695 return std::nullopt;
1696
1697 // If the LCV is for a subregion, the types might not match, and we shouldn't
1698 // reuse the binding.
1699 QualType RegionTy = getUnderlyingType(R);
1700 if (!RegionTy.isNull() &&
1701 !RegionTy->isVoidPointerType()) {
1702 QualType SourceRegionTy = LCV->getRegion()->getValueType();
1703 if (!SVB.getContext().hasSameUnqualifiedType(T1: RegionTy, T2: SourceRegionTy))
1704 return std::nullopt;
1705 }
1706
1707 if (!AllowSubregionBindings) {
1708 // If there are any other bindings within this region, we shouldn't reuse
1709 // the top-level binding.
1710 SmallVector<BindingPair, 16> Bindings;
1711 collectSubRegionBindings(Bindings, SVB, Cluster: *B.lookup(K: R->getBaseRegion()), Top: R,
1712 /*IncludeAllDefaultBindings=*/true);
1713 if (Bindings.size() > 1)
1714 return std::nullopt;
1715 }
1716
1717 return *LCV;
1718}
1719
/// Search for a lazy binding that covers \p R by walking up its region
/// hierarchy.
///
/// \returns the store and source region of the lazy compound value the data
/// should be loaded from, or a (null, null) pair if no lazy binding applies.
/// While unwinding from the recursion, the element/field/base path from the
/// found super-region down to \p originalRegion is re-built on top of the
/// lazy binding's source region.
std::pair<Store, const SubRegion *>
RegionStoreManager::findLazyBinding(RegionBindingsConstRef B,
                                   const SubRegion *R,
                                   const SubRegion *originalRegion) {
  // Never reuse a lazy binding for the region the caller originally asked
  // about; only super-regions discovered during recursion qualify.
  if (originalRegion != R) {
    if (std::optional<nonloc::LazyCompoundVal> V =
            getExistingLazyBinding(SVB&: svalBuilder, B, R, AllowSubregionBindings: true))
      return std::make_pair(x: V->getStore(), y: V->getRegion());
  }

  typedef std::pair<Store, const SubRegion *> StoreRegionPair;
  StoreRegionPair Result = StoreRegionPair();

  if (const ElementRegion *ER = dyn_cast<ElementRegion>(Val: R)) {
    Result = findLazyBinding(B, R: cast<SubRegion>(Val: ER->getSuperRegion()),
                             originalRegion);

    // Re-anchor the element region onto the lazy binding's source region.
    if (Result.second)
      Result.second = MRMgr.getElementRegionWithSuper(ER, superRegion: Result.second);

  } else if (const FieldRegion *FR = dyn_cast<FieldRegion>(Val: R)) {
    Result = findLazyBinding(B, R: cast<SubRegion>(Val: FR->getSuperRegion()),
                             originalRegion);

    // Re-anchor the field region onto the lazy binding's source region.
    if (Result.second)
      Result.second = MRMgr.getFieldRegionWithSuper(FR, superRegion: Result.second);

  } else if (const CXXBaseObjectRegion *BaseReg =
                 dyn_cast<CXXBaseObjectRegion>(Val: R)) {
    // C++ base object region is another kind of region that we should blast
    // through to look for lazy compound value. It is like a field region.
    Result = findLazyBinding(B, R: cast<SubRegion>(Val: BaseReg->getSuperRegion()),
                             originalRegion);

    if (Result.second)
      Result.second = MRMgr.getCXXBaseObjectRegionWithSuper(baseReg: BaseReg,
                                                            superRegion: Result.second);
  }

  return Result;
}
1761
1762/// This is a helper function for `getConstantValFromConstArrayInitializer`.
1763///
1764/// Return an array of extents of the declared array type.
1765///
1766/// E.g. for `int x[1][2][3];` returns { 1, 2, 3 }.
1767static SmallVector<uint64_t, 2>
1768getConstantArrayExtents(const ConstantArrayType *CAT) {
1769 assert(CAT && "ConstantArrayType should not be null");
1770 CAT = cast<ConstantArrayType>(Val: CAT->getCanonicalTypeInternal());
1771 SmallVector<uint64_t, 2> Extents;
1772 do {
1773 Extents.push_back(Elt: CAT->getZExtSize());
1774 } while ((CAT = dyn_cast<ConstantArrayType>(Val: CAT->getElementType())));
1775 return Extents;
1776}
1777
1778/// This is a helper function for `getConstantValFromConstArrayInitializer`.
1779///
1780/// Return an array of offsets from nested ElementRegions and a root base
1781/// region. The array is never empty and a base region is never null.
1782///
1783/// E.g. for `Element{Element{Element{VarRegion},1},2},3}` returns { 3, 2, 1 }.
1784/// This represents an access through indirection: `arr[1][2][3];`
1785///
1786/// \param ER The given (possibly nested) ElementRegion.
1787///
1788/// \note The result array is in the reverse order of indirection expression:
1789/// arr[1][2][3] -> { 3, 2, 1 }. This helps to provide complexity O(n), where n
1790/// is a number of indirections. It may not affect performance in real-life
1791/// code, though.
1792static std::pair<SmallVector<SVal, 2>, const MemRegion *>
1793getElementRegionOffsetsWithBase(const ElementRegion *ER) {
1794 assert(ER && "ConstantArrayType should not be null");
1795 const MemRegion *Base;
1796 SmallVector<SVal, 2> SValOffsets;
1797 do {
1798 SValOffsets.push_back(Elt: ER->getIndex());
1799 Base = ER->getSuperRegion();
1800 ER = dyn_cast<ElementRegion>(Val: Base);
1801 } while (ER);
1802 return {SValOffsets, Base};
1803}
1804
1805/// This is a helper function for `getConstantValFromConstArrayInitializer`.
1806///
1807/// Convert array of offsets from `SVal` to `uint64_t` in consideration of
1808/// respective array extents.
1809/// \param SrcOffsets [in] The array of offsets of type `SVal` in reversed
1810/// order (expectedly received from `getElementRegionOffsetsWithBase`).
1811/// \param ArrayExtents [in] The array of extents.
1812/// \param DstOffsets [out] The array of offsets of type `uint64_t`.
1813/// \returns:
1814/// - `std::nullopt` for successful convertion.
1815/// - `UndefinedVal` or `UnknownVal` otherwise. It's expected that this SVal
1816/// will be returned as a suitable value of the access operation.
1817/// which should be returned as a correct
1818///
1819/// \example:
1820/// const int arr[10][20][30] = {}; // ArrayExtents { 10, 20, 30 }
1821/// int x1 = arr[4][5][6]; // SrcOffsets { NonLoc(6), NonLoc(5), NonLoc(4) }
1822/// // DstOffsets { 4, 5, 6 }
1823/// // returns std::nullopt
1824/// int x2 = arr[42][5][-6]; // returns UndefinedVal
1825/// int x3 = arr[4][5][x2]; // returns UnknownVal
1826static std::optional<SVal>
1827convertOffsetsFromSvalToUnsigneds(const SmallVector<SVal, 2> &SrcOffsets,
1828 const SmallVector<uint64_t, 2> ArrayExtents,
1829 SmallVector<uint64_t, 2> &DstOffsets) {
1830 // Check offsets for being out of bounds.
1831 // C++20 [expr.add] 7.6.6.4 (excerpt):
1832 // If P points to an array element i of an array object x with n
1833 // elements, where i < 0 or i > n, the behavior is undefined.
1834 // Dereferencing is not allowed on the "one past the last
1835 // element", when i == n.
1836 // Example:
1837 // const int arr[3][2] = {{1, 2}, {3, 4}};
1838 // arr[0][0]; // 1
1839 // arr[0][1]; // 2
1840 // arr[0][2]; // UB
1841 // arr[1][0]; // 3
1842 // arr[1][1]; // 4
1843 // arr[1][-1]; // UB
1844 // arr[2][0]; // 0
1845 // arr[2][1]; // 0
1846 // arr[-2][0]; // UB
1847 DstOffsets.resize(N: SrcOffsets.size());
1848 auto ExtentIt = ArrayExtents.begin();
1849 auto OffsetIt = DstOffsets.begin();
1850 // Reverse `SValOffsets` to make it consistent with `ArrayExtents`.
1851 for (SVal V : llvm::reverse(C: SrcOffsets)) {
1852 if (auto CI = V.getAs<nonloc::ConcreteInt>()) {
1853 // When offset is out of array's bounds, result is UB.
1854 const llvm::APSInt &Offset = CI->getValue();
1855 if (Offset.isNegative() || Offset.uge(RHS: *(ExtentIt++)))
1856 return UndefinedVal();
1857 // Store index in a reversive order.
1858 *(OffsetIt++) = Offset.getZExtValue();
1859 continue;
1860 }
1861 // Symbolic index presented. Return Unknown value.
1862 // FIXME: We also need to take ElementRegions with symbolic indexes into
1863 // account.
1864 return UnknownVal();
1865 }
1866 return std::nullopt;
1867}
1868
/// Try to read `arr[i][j]...` directly from the constant initializer of the
/// array's VarDecl, when the initializer can be trusted (const-qualified, or
/// a global while analyzing main()). Returns std::nullopt when this shortcut
/// does not apply; UnknownVal/UndefinedVal for symbolic or out-of-bounds
/// indices respectively.
std::optional<SVal> RegionStoreManager::getConstantValFromConstArrayInitializer(
    RegionBindingsConstRef B, const ElementRegion *R) {
  assert(R && "ElementRegion should not be null");

  // Treat an n-dimensional array.
  SmallVector<SVal, 2> SValOffsets;
  const MemRegion *Base;
  std::tie(args&: SValOffsets, args&: Base) = getElementRegionOffsetsWithBase(ER: R);
  // Only loads rooted at a variable can be resolved from an initializer.
  const VarRegion *VR = dyn_cast<VarRegion>(Val: Base);
  if (!VR)
    return std::nullopt;

  assert(!SValOffsets.empty() && "getElementRegionOffsets guarantees the "
                                 "offsets vector is not empty.");

  // Check if the containing array has an initialized value that we can trust.
  // We can trust a const value or a value of a global initializer in main().
  const VarDecl *VD = VR->getDecl();
  if (!VD->getType().isConstQualified() &&
      !R->getElementType().isConstQualified() &&
      (!B.isMainAnalysis() || !VD->hasGlobalStorage()))
    return std::nullopt;

  // Array's declaration should have `ConstantArrayType` type, because only this
  // type contains an array extent. It may happen that array type can be of
  // `IncompleteArrayType` type. To get the declaration of `ConstantArrayType`
  // type, we should find the declaration in the redeclarations chain that has
  // the initialization expression.
  // NOTE: `getAnyInitializer` has an out-parameter, which returns a new `VD`
  // from which an initializer is obtained. We replace current `VD` with the new
  // `VD`. If the return value of the function is null then `VD` won't be
  // replaced.
  const Expr *Init = VD->getAnyInitializer(D&: VD);
  // NOTE: If `Init` is non-null, then a new `VD` is non-null for sure. So check
  // `Init` for null only and don't worry about the replaced `VD`.
  if (!Init)
    return std::nullopt;

  // Array's declaration should have ConstantArrayType type, because only this
  // type contains an array extent.
  const ConstantArrayType *CAT = Ctx.getAsConstantArrayType(T: VD->getType());
  if (!CAT)
    return std::nullopt;

  // Get array extents.
  SmallVector<uint64_t, 2> Extents = getConstantArrayExtents(CAT);

  // The number of offsets should equal to the numbers of extents,
  // otherwise wrong type punning occurred. For instance:
  //  int arr[1][2][3];
  //  auto ptr = (int(*)[42])arr;
  //  auto x = ptr[4][2]; // UB
  // FIXME: Should return UndefinedVal.
  if (SValOffsets.size() != Extents.size())
    return std::nullopt;

  SmallVector<uint64_t, 2> ConcreteOffsets;
  if (std::optional<SVal> V = convertOffsetsFromSvalToUnsigneds(
          SrcOffsets: SValOffsets, ArrayExtents: Extents, DstOffsets&: ConcreteOffsets))
    return *V;

  // Handle InitListExpr.
  // Example:
  //  const char arr[4][2] = { { 1, 2 }, { 3 }, 4, 5 };
  if (const auto *ILE = dyn_cast<InitListExpr>(Val: Init))
    return getSValFromInitListExpr(ILE, ConcreteOffsets, ElemT: R->getElementType());

  // Handle StringLiteral.
  // Example:
  //  const char arr[] = "abc";
  if (const auto *SL = dyn_cast<StringLiteral>(Val: Init))
    return getSValFromStringLiteral(SL, Offset: ConcreteOffsets.front(),
                                    ElemT: R->getElementType());

  // FIXME: Handle CompoundLiteralExpr.

  return std::nullopt;
}
1947
/// Returns an SVal, if possible, for the specified position of an
/// initialization list.
///
/// \param ILE The given initialization list.
/// \param Offsets The array of unsigned offsets. E.g. for the expression
///  `int x = arr[1][2][3];` an array should be { 1, 2, 3 }.
/// \param ElemT The type of the result SVal expression.
/// \return Optional SVal for the particular position in the initialization
///   list. E.g. for the list `{{1, 2}, {3, 4}, {5, 6}, {}}` offsets:
///   - {1, 1} returns SVal{4}, because it's the second position in the second
///     sublist;
///   - {3, 0} returns SVal{0}, because there's no explicit value at this
///     position in the sublist.
///
/// NOTE: In order to get a valid SVal, a caller shall guarantee valid offsets
/// for the given initialization list. Otherwise SVal can be an equivalent to 0
/// or lead to assertion.
std::optional<SVal> RegionStoreManager::getSValFromInitListExpr(
    const InitListExpr *ILE, const SmallVector<uint64_t, 2> &Offsets,
    QualType ElemT) {
  assert(ILE && "InitListExpr should not be null");

  // Descend one list level per offset: the loop replaces ILE with the nested
  // list selected by each offset until a scalar initializer is reached.
  for (uint64_t Offset : Offsets) {
    // C++20 [dcl.init.string] 9.4.2.1:
    //   An array of ordinary character type [...] can be initialized by [...]
    //   an appropriately-typed string-literal enclosed in braces.
    // Example:
    //  const char arr[] = { "abc" };
    if (ILE->isStringLiteralInit())
      if (const auto *SL = dyn_cast<StringLiteral>(Val: ILE->getInit(Init: 0)))
        return getSValFromStringLiteral(SL, Offset, ElemT);

    // C++20 [expr.add] 9.4.17.5 (excerpt):
    //   i-th array element is value-initialized for each k < i ≤ n,
    //   where k is an expression-list size and n is an array extent.
    if (Offset >= ILE->getNumInits())
      return svalBuilder.makeZeroVal(type: ElemT);

    const Expr *E = ILE->getInit(Init: Offset);
    const auto *IL = dyn_cast<InitListExpr>(Val: E);
    if (!IL)
      // Return a constant value, if it is presented.
      // FIXME: Support other SVals.
      return svalBuilder.getConstantVal(E);

    // Go to the nested initializer list.
    ILE = IL;
  }

  assert(ILE);

  // FIXME: Unhandled InitListExpr sub-expression, possibly constructing an
  // enum?
  return std::nullopt;
}
2003
2004/// Returns an SVal, if possible, for the specified position in a string
2005/// literal.
2006///
2007/// \param SL The given string literal.
2008/// \param Offset The unsigned offset. E.g. for the expression
2009/// `char x = str[42];` an offset should be 42.
2010/// E.g. for the string "abc" offset:
2011/// - 1 returns SVal{b}, because it's the second position in the string.
2012/// - 42 returns SVal{0}, because there's no explicit value at this
2013/// position in the string.
2014/// \param ElemT The type of the result SVal expression.
2015///
2016/// NOTE: We return `0` for every offset >= the literal length for array
2017/// declarations, like:
2018/// const char str[42] = "123"; // Literal length is 4.
2019/// char c = str[41]; // Offset is 41.
2020/// FIXME: Nevertheless, we can't do the same for pointer declaraions, like:
2021/// const char * const str = "123"; // Literal length is 4.
2022/// char c = str[41]; // Offset is 41. Returns `0`, but Undef
2023/// // expected.
2024/// It should be properly handled before reaching this point.
2025/// The main problem is that we can't distinguish between these declarations,
2026/// because in case of array we can get the Decl from VarRegion, but in case
2027/// of pointer the region is a StringRegion, which doesn't contain a Decl.
2028/// Possible solution could be passing an array extent along with the offset.
2029SVal RegionStoreManager::getSValFromStringLiteral(const StringLiteral *SL,
2030 uint64_t Offset,
2031 QualType ElemT) {
2032 assert(SL && "StringLiteral should not be null");
2033 // C++20 [dcl.init.string] 9.4.2.3:
2034 // If there are fewer initializers than there are array elements, each
2035 // element not explicitly initialized shall be zero-initialized [dcl.init].
2036 uint32_t Code = (Offset >= SL->getLength()) ? 0 : SL->getCodeUnit(i: Offset);
2037 return svalBuilder.makeIntVal(integer: Code, type: ElemT);
2038}
2039
2040static std::optional<SVal> getDerivedSymbolForBinding(
2041 RegionBindingsConstRef B, const TypedValueRegion *BaseRegion,
2042 const TypedValueRegion *SubReg, const ASTContext &Ctx, SValBuilder &SVB) {
2043 assert(BaseRegion);
2044 QualType BaseTy = BaseRegion->getValueType();
2045 QualType Ty = SubReg->getValueType();
2046 if (BaseTy->isScalarType() && Ty->isScalarType()) {
2047 if (Ctx.getTypeSizeInChars(T: BaseTy) >= Ctx.getTypeSizeInChars(T: Ty)) {
2048 if (const std::optional<SVal> &ParentValue =
2049 B.getDirectBinding(R: BaseRegion)) {
2050 if (SymbolRef ParentValueAsSym = ParentValue->getAsSymbol())
2051 return SVB.getDerivedRegionValueSymbolVal(parentSymbol: ParentValueAsSym, region: SubReg);
2052
2053 if (ParentValue->isUndef())
2054 return UndefinedVal();
2055
2056 // Other cases: give up. We are indexing into a larger object
2057 // that has some value, but we don't know how to handle that yet.
2058 return UnknownVal();
2059 }
2060 }
2061 }
2062 return std::nullopt;
2063}
2064
/// Look up the value of an element region (e.g. `arr[i]`).
///
/// Falls back, in order, to: a direct binding, string-literal contents,
/// constant array initializers, a derived symbol when reinterpreting a larger
/// scalar, and finally the common field/element lookup path.
SVal RegionStoreManager::getBindingForElement(RegionBindingsConstRef B,
                                              const ElementRegion* R) {
  // Check if the region has a binding.
  if (const std::optional<SVal> &V = B.getDirectBinding(R))
    return *V;

  const MemRegion* superR = R->getSuperRegion();

  // Check if the region is an element region of a string literal.
  if (const StringRegion *StrR = dyn_cast<StringRegion>(Val: superR)) {
    // FIXME: Handle loads from strings where the literal is treated as
    // an integer, e.g., *((unsigned int*)"hello"). Such loads are UB according
    // to C++20 7.2.1.11 [basic.lval].
    QualType T = Ctx.getAsArrayType(T: StrR->getValueType())->getElementType();
    if (!Ctx.hasSameUnqualifiedType(T1: T, T2: R->getElementType()))
      return UnknownVal();
    if (const auto CI = R->getIndex().getAs<nonloc::ConcreteInt>()) {
      const llvm::APSInt &Idx = CI->getValue();
      // A negative index into a string literal is undefined behavior.
      if (Idx < 0)
        return UndefinedVal();
      const StringLiteral *SL = StrR->getStringLiteral();
      return getSValFromStringLiteral(SL, Offset: Idx.getZExtValue(), ElemT: T);
    }
  } else if (isa<ElementRegion, VarRegion>(Val: superR)) {
    // Try reading the value from a trusted constant initializer of the
    // enclosing array variable.
    if (std::optional<SVal> V = getConstantValFromConstArrayInitializer(B, R))
      return *V;
  }

  // Check for loads from a code text region.  For such loads, just give up.
  if (isa<CodeTextRegion>(Val: superR))
    return UnknownVal();

  // Handle the case where we are indexing into a larger scalar object.
  // For example, this handles:
  //   int x = ...
  //   char *y = &x;
  //   return *y;
  // FIXME: This is a hack, and doesn't do anything really intelligent yet.
  const RegionRawOffset &O = R->getAsArrayOffset();

  // If we cannot reason about the offset, return an unknown value.
  if (!O.getRegion())
    return UnknownVal();

  if (const TypedValueRegion *baseR = dyn_cast<TypedValueRegion>(Val: O.getRegion()))
    if (auto V = getDerivedSymbolForBinding(B, BaseRegion: baseR, SubReg: R, Ctx, SVB&: svalBuilder))
      return *V;

  return getBindingForFieldOrElementCommon(B, R, Ty: R->getElementType());
}
2115
/// Look up the value of a field region (e.g. `s.f` or `p->f`).
///
/// Falls back, in order, to: a direct binding, the enclosing variable's
/// constant initializer, a derived symbol for bit-level reinterpretation of a
/// larger scalar, and finally the common field/element lookup path.
SVal RegionStoreManager::getBindingForField(RegionBindingsConstRef B,
                                            const FieldRegion* R) {

  // Check if the region has a binding.
  if (const std::optional<SVal> &V = B.getDirectBinding(R))
    return *V;

  // If the containing record was initialized, try to get its constant value.
  const FieldDecl *FD = R->getDecl();
  QualType Ty = FD->getType();
  const MemRegion* superR = R->getSuperRegion();
  if (const auto *VR = dyn_cast<VarRegion>(Val: superR)) {
    const VarDecl *VD = VR->getDecl();
    QualType RecordVarTy = VD->getType();
    unsigned Index = FD->getFieldIndex();
    // Either the record variable or the field has an initializer that we can
    // trust. We trust initializers of constants and, additionally, respect
    // initializers of globals when analyzing main().
    if (RecordVarTy.isConstQualified() || Ty.isConstQualified() ||
        (B.isMainAnalysis() && VD->hasGlobalStorage()))
      if (const Expr *Init = VD->getAnyInitializer())
        if (const auto *InitList = dyn_cast<InitListExpr>(Val: Init)) {
          if (Index < InitList->getNumInits()) {
            if (const Expr *FieldInit = InitList->getInit(Init: Index))
              if (std::optional<SVal> V = svalBuilder.getConstantVal(E: FieldInit))
                return *V;
          } else {
            // Fields past the end of the initializer list are
            // value-initialized, i.e. zero for scalar types.
            return svalBuilder.makeZeroVal(type: Ty);
          }
        }
  }

  // Handle the case where we are accessing into a larger scalar object.
  // For example, this handles:
  //   struct header {
  //     unsigned a : 1;
  //     unsigned b : 1;
  //   };
  //   struct parse_t {
  //     unsigned bits0 : 1;
  //     unsigned bits2 : 2; // <-- header
  //     unsigned bits4 : 4;
  //   };
  //   int parse(parse_t *p) {
  //     unsigned copy = p->bits2;
  //     header *bits = (header *)&copy;
  //     return bits->b;  <-- here
  //   }
  if (const auto *Base = dyn_cast<TypedValueRegion>(Val: R->getBaseRegion()))
    if (auto V = getDerivedSymbolForBinding(B, BaseRegion: Base, SubReg: R, Ctx, SVB&: svalBuilder))
      return *V;

  return getBindingForFieldOrElementCommon(B, R, Ty);
}
2170
2171std::optional<SVal> RegionStoreManager::getBindingForDerivedDefaultValue(
2172 RegionBindingsConstRef B, const MemRegion *superR,
2173 const TypedValueRegion *R, QualType Ty) {
2174
2175 if (const std::optional<SVal> &D = B.getDefaultBinding(R: superR)) {
2176 SVal val = *D;
2177 if (SymbolRef parentSym = val.getAsSymbol())
2178 return svalBuilder.getDerivedRegionValueSymbolVal(parentSymbol: parentSym, region: R);
2179
2180 if (val.isZeroConstant())
2181 return svalBuilder.makeZeroVal(type: Ty);
2182
2183 if (val.isUnknownOrUndef())
2184 return val;
2185
2186 // Lazy bindings are usually handled through getExistingLazyBinding().
2187 // We should unify these two code paths at some point.
2188 if (isa<nonloc::LazyCompoundVal, nonloc::CompoundVal>(Val: val))
2189 return val;
2190
2191 llvm_unreachable("Unknown default value");
2192 }
2193
2194 return std::nullopt;
2195}
2196
2197SVal RegionStoreManager::getLazyBinding(const SubRegion *LazyBindingRegion,
2198 RegionBindingsRef LazyBinding) {
2199 SVal Result;
2200 if (const ElementRegion *ER = dyn_cast<ElementRegion>(Val: LazyBindingRegion))
2201 Result = getBindingForElement(B: LazyBinding, R: ER);
2202 else
2203 Result = getBindingForField(B: LazyBinding,
2204 R: cast<FieldRegion>(Val: LazyBindingRegion));
2205
2206 // FIXME: This is a hack to deal with RegionStore's inability to distinguish a
2207 // default value for /part/ of an aggregate from a default value for the
2208 // /entire/ aggregate. The most common case of this is when struct Outer
2209 // has as its first member a struct Inner, which is copied in from a stack
2210 // variable. In this case, even if the Outer's default value is symbolic, 0,
2211 // or unknown, it gets overridden by the Inner's default value of undefined.
2212 //
2213 // This is a general problem -- if the Inner is zero-initialized, the Outer
2214 // will now look zero-initialized. The proper way to solve this is with a
2215 // new version of RegionStore that tracks the extent of a binding as well
2216 // as the offset.
2217 //
2218 // This hack only takes care of the undefined case because that can very
2219 // quickly result in a warning.
2220 if (Result.isUndef())
2221 Result = UnknownVal();
2222
2223 return Result;
2224}
2225
/// Shared fallback lookup for field and element regions that have no direct
/// binding: consult lazy bindings, then default bindings of ancestor regions,
/// and finally use the region's memory space to choose between undefined and
/// a fresh region-value symbol.
SVal
RegionStoreManager::getBindingForFieldOrElementCommon(RegionBindingsConstRef B,
                                                      const TypedValueRegion *R,
                                                      QualType Ty) {

  // At this point we have already checked in either getBindingForElement or
  // getBindingForField if 'R' has a direct binding.

  // Lazy binding?
  Store lazyBindingStore = nullptr;
  const SubRegion *lazyBindingRegion = nullptr;
  std::tie(args&: lazyBindingStore, args&: lazyBindingRegion) = findLazyBinding(B, R, originalRegion: R);
  if (lazyBindingRegion)
    return getLazyBinding(LazyBindingRegion: lazyBindingRegion,
                          LazyBinding: getRegionBindings(store: lazyBindingStore));

  // Record whether or not we see a symbolic index. That can completely
  // be out of scope of our lookup.
  bool hasSymbolicIndex = false;

  // FIXME: This is a hack to deal with RegionStore's inability to distinguish a
  // default value for /part/ of an aggregate from a default value for the
  // /entire/ aggregate. The most common case of this is when struct Outer
  // has as its first member a struct Inner, which is copied in from a stack
  // variable. In this case, even if the Outer's default value is symbolic, 0,
  // or unknown, it gets overridden by the Inner's default value of undefined.
  //
  // This is a general problem -- if the Inner is zero-initialized, the Outer
  // will now look zero-initialized. The proper way to solve this is with a
  // new version of RegionStore that tracks the extent of a binding as well
  // as the offset.
  //
  // This hack only takes care of the undefined case because that can very
  // quickly result in a warning.
  bool hasPartialLazyBinding = false;

  // Walk up the region hierarchy looking for a default binding installed on
  // an ancestor (e.g. a memset or an aggregate initialization).
  const SubRegion *SR = R;
  while (SR) {
    const MemRegion *Base = SR->getSuperRegion();
    if (std::optional<SVal> D =
            getBindingForDerivedDefaultValue(B, superR: Base, R, Ty)) {
      // A lazy default only marks partial coverage; keep scanning behavior
      // below instead of returning it directly (see FIXME above).
      if (D->getAs<nonloc::LazyCompoundVal>()) {
        hasPartialLazyBinding = true;
        break;
      }

      return *D;
    }

    if (const ElementRegion *ER = dyn_cast<ElementRegion>(Val: Base)) {
      NonLoc index = ER->getIndex();
      if (!index.isConstant())
        hasSymbolicIndex = true;
    }

    // If our super region is a field or element itself, walk up the region
    // hierarchy to see if there is a default value installed in an ancestor.
    SR = dyn_cast<SubRegion>(Val: Base);
  }

  if (isa<StackLocalsSpaceRegion>(Val: R->getRawMemorySpace())) {
    if (isa<ElementRegion>(Val: R)) {
      // Currently we don't reason specially about Clang-style vectors.  Check
      // if superR is a vector and if so return Unknown.
      if (const TypedValueRegion *typedSuperR =
              dyn_cast<TypedValueRegion>(Val: R->getSuperRegion())) {
        if (typedSuperR->getValueType()->isVectorType())
          return UnknownVal();
      }
    }

    // FIXME: We also need to take ElementRegions with symbolic indexes into
    // account. This case handles both directly accessing an ElementRegion
    // with a symbolic offset, but also fields within an element with
    // a symbolic offset.
    if (hasSymbolicIndex)
      return UnknownVal();

    // Additionally allow introspection of a block's internal layout.
    // Try to get direct binding if all other attempts failed thus far.
    // Else, return UndefinedVal()
    if (!hasPartialLazyBinding && !isa<BlockDataRegion>(Val: R->getBaseRegion())) {
      if (const std::optional<SVal> &V = B.getDefaultBinding(R))
        return *V;
      return UndefinedVal();
    }
  }

  // All other values are symbolic.
  return svalBuilder.getRegionValueSymbolVal(region: R);
}
2317
2318SVal RegionStoreManager::getBindingForObjCIvar(RegionBindingsConstRef B,
2319 const ObjCIvarRegion* R) {
2320 // Check if the region has a binding.
2321 if (const std::optional<SVal> &V = B.getDirectBinding(R))
2322 return *V;
2323
2324 const MemRegion *superR = R->getSuperRegion();
2325
2326 // Check if the super region has a default binding.
2327 if (const std::optional<SVal> &V = B.getDefaultBinding(R: superR)) {
2328 if (SymbolRef parentSym = V->getAsSymbol())
2329 return svalBuilder.getDerivedRegionValueSymbolVal(parentSymbol: parentSym, region: R);
2330
2331 // Other cases: give up.
2332 return UnknownVal();
2333 }
2334
2335 return getBindingForLazySymbol(R);
2336}
2337
/// Look up the value of a variable region, lazily deriving one from the
/// variable's declaration (initializer, memory space) when the store has
/// neither a direct nor a default binding for it.
///
/// NOTE: The order of checks below is semantically significant; see the
/// inline comments.
SVal RegionStoreManager::getBindingForVar(RegionBindingsConstRef B,
                                          const VarRegion *R) {

  // Check if the region has a binding.
  if (std::optional<SVal> V = B.getDirectBinding(R))
    return *V;

  if (std::optional<SVal> V = B.getDefaultBinding(R))
    return *V;

  // Lazily derive a value for the VarRegion.
  const VarDecl *VD = R->getDecl();
  const MemSpaceRegion *MS = R->getRawMemorySpace();

  // Arguments are always symbolic.
  if (isa<StackArgumentsSpaceRegion>(Val: MS))
    return svalBuilder.getRegionValueSymbolVal(region: R);

  // Is 'VD' declared constant?  If so, retrieve the constant value.
  if (VD->getType().isConstQualified()) {
    if (const Expr *Init = VD->getAnyInitializer()) {
      if (std::optional<SVal> V = svalBuilder.getConstantVal(E: Init))
        return *V;

      // If the variable is const qualified and has an initializer but
      // we couldn't evaluate initializer to a value, treat the value as
      // unknown.
      return UnknownVal();
    }
  }

  // This must come after the check for constants because closure-captured
  // constant variables may appear in UnknownSpaceRegion.
  if (isa<UnknownSpaceRegion>(Val: MS))
    return svalBuilder.getRegionValueSymbolVal(region: R);

  if (isa<GlobalsSpaceRegion>(Val: MS)) {
    QualType T = VD->getType();

    // If we're in main(), then global initializers have not become stale yet.
    if (B.isMainAnalysis())
      if (const Expr *Init = VD->getAnyInitializer())
        if (std::optional<SVal> V = svalBuilder.getConstantVal(E: Init))
          return *V;

    // Function-scoped static variables are default-initialized to 0; if they
    // have an initializer, it would have been processed by now.
    // FIXME: This is only true when we're starting analysis from main().
    // We're losing a lot of coverage here.
    if (isa<StaticGlobalSpaceRegion>(Val: MS))
      return svalBuilder.makeZeroVal(type: T);

    // Try deriving a value from a default binding on the memory space itself.
    if (std::optional<SVal> V = getBindingForDerivedDefaultValue(B, superR: MS, R, Ty: T)) {
      assert(!V->getAs<nonloc::LazyCompoundVal>());
      return *V;
    }

    return svalBuilder.getRegionValueSymbolVal(region: R);
  }

  // Remaining case: an uninitialized stack local.
  return UndefinedVal();
}
2400
2401SVal RegionStoreManager::getBindingForLazySymbol(const TypedValueRegion *R) {
2402 // All other values are symbolic.
2403 return svalBuilder.getRegionValueSymbolVal(region: R);
2404}
2405
/// Collect the "interesting" values reachable from a lazy compound value:
/// every bound sub-value that is not a constant or unknown/undefined,
/// including values found inside nested lazy compound values. Results are
/// memoized in LazyBindingsMap, keyed by the LCV's data; the returned
/// reference points into that cache.
const RegionStoreManager::SValListTy &
RegionStoreManager::getInterestingValues(nonloc::LazyCompoundVal LCV) {
  // First, check the cache.
  LazyBindingsMapTy::iterator I = LazyBindingsMap.find(Val: LCV.getCVData());
  if (I != LazyBindingsMap.end())
    return I->second;

  // If we don't have a list of values cached, start constructing it.
  SValListTy List;

  const SubRegion *LazyR = LCV.getRegion();
  RegionBindingsRef B = getRegionBindings(store: LCV.getStore());

  // If this region had /no/ bindings at the time, there are no interesting
  // values to return.
  const ClusterBindings *Cluster = B.lookup(K: LazyR->getBaseRegion());
  if (!Cluster)
    return (LazyBindingsMap[LCV.getCVData()] = std::move(List));

  SmallVector<BindingPair, 32> Bindings;
  collectSubRegionBindings(Bindings, SVB&: svalBuilder, Cluster: *Cluster, Top: LazyR,
                           /*IncludeAllDefaultBindings=*/true);
  for (SVal V : llvm::make_second_range(c&: Bindings)) {
    // Constants and unknown/undef values carry no information worth tracking.
    if (V.isUnknownOrUndef() || V.isConstant())
      continue;

    // Recurse into nested lazy compound values; the nested LCV itself is
    // also kept in the list below.
    if (auto InnerLCV = V.getAs<nonloc::LazyCompoundVal>()) {
      const SValListTy &InnerList = getInterestingValues(LCV: *InnerLCV);
      llvm::append_range(C&: List, R: InnerList);
    }

    List.push_back(x: V);
  }

  // Cache the result before returning; the map owns the list's storage.
  return (LazyBindingsMap[LCV.getCVData()] = std::move(List));
}
2442
2443NonLoc RegionStoreManager::createLazyBinding(RegionBindingsConstRef B,
2444 const TypedValueRegion *R) {
2445 if (std::optional<nonloc::LazyCompoundVal> V =
2446 getExistingLazyBinding(SVB&: svalBuilder, B, R, AllowSubregionBindings: false))
2447 return *V;
2448
2449 return svalBuilder.makeLazyCompoundVal(store: StoreRef(B.asStore(), *this), region: R);
2450}
2451
2452SVal RegionStoreManager::getBindingForStruct(RegionBindingsConstRef B,
2453 const TypedValueRegion *R) {
2454 const RecordDecl *RD =
2455 R->getValueType()->castAsCanonical<RecordType>()->getDecl();
2456 if (!RD->getDefinition())
2457 return UnknownVal();
2458
2459 // We also create a LCV for copying empty structs because then the store
2460 // behavior doesn't depend on the struct layout.
2461 // This way even an empty struct can carry taint, no matter if creduce drops
2462 // the last field member or not.
2463 return createLazyBinding(B, R);
2464}
2465
2466SVal RegionStoreManager::getBindingForArray(RegionBindingsConstRef B,
2467 const TypedValueRegion *R) {
2468 assert(Ctx.getAsConstantArrayType(R->getValueType()) &&
2469 "Only constant array types can have compound bindings.");
2470
2471 return createLazyBinding(B, R);
2472}
2473
2474bool RegionStoreManager::includedInBindings(Store store,
2475 const MemRegion *region) const {
2476 RegionBindingsRef B = getRegionBindings(store);
2477 region = region->getBaseRegion();
2478
2479 // Quick path: if the base is the head of a cluster, the region is live.
2480 if (B.lookup(K: region))
2481 return true;
2482
2483 // Slow path: if the region is the VALUE of any binding, it is live.
2484 for (RegionBindingsRef::iterator RI = B.begin(), RE = B.end(); RI != RE; ++RI) {
2485 const ClusterBindings &Cluster = RI.getData();
2486 for (ClusterBindings::iterator CI = Cluster.begin(), CE = Cluster.end();
2487 CI != CE; ++CI) {
2488 SVal D = CI.getData();
2489 if (const MemRegion *R = D.getAsRegion())
2490 if (R->getBaseRegion() == region)
2491 return true;
2492 }
2493 }
2494
2495 return false;
2496}
2497
2498//===----------------------------------------------------------------------===//
2499// Binding values to regions.
2500//===----------------------------------------------------------------------===//
2501
2502StoreRef RegionStoreManager::killBinding(Store ST, Loc L) {
2503 if (std::optional<loc::MemRegionVal> LV = L.getAs<loc::MemRegionVal>())
2504 if (const MemRegion* R = LV->getRegion())
2505 return StoreRef(getRegionBindings(store: ST)
2506 .removeBinding(R)
2507 .asImmutableMap()
2508 .getRootWithoutRetain(),
2509 *this);
2510
2511 return StoreRef(ST, *this);
2512}
2513
/// Bind value \p V to location \p L. Dispatches to the aggregate binders
/// (bindArray/bindStruct/bindVector/bindAggregate) based on the region's
/// value type; scalar bindings clear overlapping sub-region bindings first
/// and are then added as a direct (or, for LazyCompoundVals, default) key.
2514LimitedRegionBindingsRef
2515RegionStoreManager::bind(LimitedRegionBindingsConstRef B, Loc L, SVal V) {
2516 llvm::TimeTraceScope TimeScope("RegionStoreManager::bind",
2517 [&L]() { return locDescr(L); });
2518
 // Once the binding limit is exhausted, values are recorded as escaped
 // rather than bound.
2519 if (B.hasExhaustedBindingLimit())
2520 return B.withValuesEscaped(V);
2521
2522 // We only care about region locations.
2523 auto MemRegVal = L.getAs<loc::MemRegionVal>();
2524 if (!MemRegVal)
2525 return B;
2526
2527 const MemRegion *R = MemRegVal->getRegion();
2528
2529 // Binding directly to a symbolic region should be treated as binding
2530 // to element 0.
2531 if (const auto *SymReg = dyn_cast<SymbolicRegion>(Val: R)) {
2532 QualType Ty = SymReg->getPointeeStaticType();
 // 'void *' pointees have no element type; treat them as char.
2533 if (Ty->isVoidType())
2534 Ty = StateMgr.getContext().CharTy;
2535 R = GetElementZeroRegion(R: SymReg, T: Ty);
2536 }
2537
2538 // Check if the region is a struct region.
2539 if (const TypedValueRegion* TR = dyn_cast<TypedValueRegion>(Val: R)) {
2540 QualType Ty = TR->getValueType();
2541 if (Ty->isArrayType())
2542 return bindArray(B, R: TR, V);
2543 if (Ty->isStructureOrClassType())
2544 return bindStruct(B, R: TR, V);
2545 if (Ty->isVectorType())
2546 return bindVector(B, R: TR, V);
2547 if (Ty->isUnionType())
2548 return bindAggregate(B, R: TR, DefaultVal: V);
2549 }
2550
2551 assert((!isa<CXXThisRegion>(R) || !B.lookup(R)) &&
2552 "'this' pointer is not an l-value and is not assignable");
2553
2554 // Clear out bindings that may overlap with this binding.
2555 auto NewB = removeSubRegionBindings(B, Top: cast<SubRegion>(Val: R));
2556
2557 // LazyCompoundVals should be always bound as 'default' bindings.
2558 auto KeyKind = isa<nonloc::LazyCompoundVal>(Val: V) ? BindingKey::Default
2559 : BindingKey::Direct;
2560 return NewB.addBinding(K: BindingKey::Make(R, k: KeyKind), V);
2561}
2562
2563LimitedRegionBindingsRef
2564RegionStoreManager::setImplicitDefaultValue(LimitedRegionBindingsConstRef B,
2565 const MemRegion *R, QualType T) {
2566 if (B.hasExhaustedBindingLimit())
2567 return B;
2568
2569 // Prefer to keep the previous default binding if we had one; that is likely a
2570 // better choice than setting some arbitrary new default value.
2571 // This isn't ideal (more of a hack), but better than dropping the more
2572 // accurate default binding.
2573 if (B.getDefaultBinding(R).has_value()) {
2574 return B;
2575 }
2576
2577 SVal V;
2578
2579 if (Loc::isLocType(T))
2580 V = svalBuilder.makeNullWithType(type: T);
2581 else if (T->isIntegralOrEnumerationType())
2582 V = svalBuilder.makeZeroVal(type: T);
2583 else if (T->isStructureOrClassType() || T->isArrayType()) {
2584 // Set the default value to a zero constant when it is a structure
2585 // or array. The type doesn't really matter.
2586 V = svalBuilder.makeZeroVal(type: Ctx.IntTy);
2587 }
2588 else {
2589 // We can't represent values of this type, but we still need to set a value
2590 // to record that the region has been initialized.
2591 // If this assertion ever fires, a new case should be added above -- we
2592 // should know how to default-initialize any value we can symbolicate.
2593 assert(!SymbolManager::canSymbolicate(T) && "This type is representable");
2594 V = UnknownVal();
2595 }
2596
2597 return B.addBinding(R, k: BindingKey::Default, V);
2598}
2599
/// Attempt to eagerly copy a small constant-size array of scalars element by
/// element instead of installing a LazyCompoundVal. Returns std::nullopt when
/// the array is unsuitable (unknown size, non-scalar elements, or larger than
/// SmallArrayLimit), in which case the caller falls back to a lazy binding.
2600std::optional<LimitedRegionBindingsRef> RegionStoreManager::tryBindSmallArray(
2601 LimitedRegionBindingsConstRef B, const TypedValueRegion *R,
2602 const ArrayType *AT, nonloc::LazyCompoundVal LCV) {
2603 if (B.hasExhaustedBindingLimit())
2604 return B.withValuesEscaped(V: LCV);
2605
2606 auto CAT = dyn_cast<ConstantArrayType>(Val: AT);
2607
2608 // If we don't know the size, create a lazyCompoundVal instead.
2609 if (!CAT)
2610 return std::nullopt;
2611
 // Only scalar (or reference) elements are copied eagerly.
2612 QualType Ty = CAT->getElementType();
2613 if (!(Ty->isScalarType() || Ty->isReferenceType()))
2614 return std::nullopt;
2615
2616 // If the array is too big, create a LCV instead.
2617 uint64_t ArrSize = CAT->getLimitedSize();
2618 if (ArrSize > SmallArrayLimit)
2619 return std::nullopt;
2620
2621 LimitedRegionBindingsRef NewB = B;
2622
 // Copy each element from the LCV's snapshotted store into the
 // destination region.
2623 for (uint64_t i = 0; i < ArrSize; ++i) {
2624 auto Idx = svalBuilder.makeArrayIndex(idx: i);
2625 const ElementRegion *SrcER =
2626 MRMgr.getElementRegion(elementType: Ty, Idx, superRegion: LCV.getRegion(), Ctx);
2627 SVal V = getBindingForElement(B: getRegionBindings(store: LCV.getStore()), R: SrcER);
2628
2629 const ElementRegion *DstER = MRMgr.getElementRegion(elementType: Ty, Idx, superRegion: R, Ctx);
2630 NewB = bind(B: NewB, L: loc::MemRegionVal(DstER), V);
2631 }
2632
2633 return NewB;
2634}
2635
/// Bind \p Init to array region \p R. Compound values are unpacked element
/// by element; lazy/symbolic/constant initializers become a single default
/// binding via bindAggregate. When the initializer list is shorter than the
/// array, the remainder receives an implicit zero-like default value.
2636LimitedRegionBindingsRef
2637RegionStoreManager::bindArray(LimitedRegionBindingsConstRef B,
2638 const TypedValueRegion *R, SVal Init) {
2639 llvm::TimeTraceScope TimeScope("RegionStoreManager::bindArray",
2640 [R]() { return R->getDescriptiveName(); });
2641 if (B.hasExhaustedBindingLimit())
2642 return B.withValuesEscaped(V: Init);
2643
2644 const ArrayType *AT =cast<ArrayType>(Val: Ctx.getCanonicalType(T: R->getValueType()));
2645 QualType ElementTy = AT->getElementType();
 // Size is unset for variable/incomplete arrays.
2646 std::optional<uint64_t> Size;
2647
2648 if (const ConstantArrayType* CAT = dyn_cast<ConstantArrayType>(Val: AT))
2649 Size = CAT->getZExtSize();
2650
2651 // Check if the init expr is a literal. If so, bind the rvalue instead.
2652 // FIXME: It's not responsibility of the Store to transform this lvalue
2653 // to rvalue. ExprEngine or maybe even CFG should do this before binding.
2654 if (std::optional<loc::MemRegionVal> MRV = Init.getAs<loc::MemRegionVal>()) {
2655 SVal V = getBinding(S: B.asStore(), L: *MRV, T: R->getValueType());
2656 return bindAggregate(B, R, DefaultVal: V);
2657 }
2658
2659 // FIXME Single value constant should have been handled before this call to
2660 // bindArray. This is only a hotfix to not crash.
2661 if (Init.isConstant())
2662 return bindAggregate(B, R, DefaultVal: Init);
2663
 // For lazy copies, try an eager per-element copy of small scalar arrays;
 // otherwise keep the lazy value as a default binding.
2664 if (std::optional LCV = Init.getAs<nonloc::LazyCompoundVal>()) {
2665 if (std::optional NewB = tryBindSmallArray(B, R, AT, LCV: *LCV))
2666 return *NewB;
2667 return bindAggregate(B, R, DefaultVal: Init);
2668 }
2669
2670 if (isa<nonloc::SymbolVal, UnknownVal, UndefinedVal>(Val: Init))
2671 return bindAggregate(B, R, DefaultVal: Init);
2672
2673 // Remaining case: explicit compound values.
2674 const nonloc::CompoundVal& CV = Init.castAs<nonloc::CompoundVal>();
2675 nonloc::CompoundVal::iterator VI = CV.begin(), VE = CV.end();
2676 uint64_t i = 0;
2677
2678 LimitedRegionBindingsRef NewB = B;
2679
 // Walk element index and initializer iterator in lockstep.
2680 for (; Size ? i < *Size : true; ++i, ++VI) {
2681 // The init list might be shorter than the array length.
2682 if (VI == VE)
2683 break;
2684 if (NewB.hasExhaustedBindingLimit())
2685 return NewB.withValuesEscaped(Begin: VI, End: VE);
2686
2687 NonLoc Idx = svalBuilder.makeArrayIndex(idx: i);
2688 const ElementRegion *ER = MRMgr.getElementRegion(elementType: ElementTy, Idx, superRegion: R, Ctx);
2689
 // Recurse for aggregate elements; plain bind for scalars.
2690 if (ElementTy->isStructureOrClassType())
2691 NewB = bindStruct(B: NewB, R: ER, V: *VI);
2692 else if (ElementTy->isArrayType())
2693 NewB = bindArray(B: NewB, R: ER, Init: *VI);
2694 else
2695 NewB = bind(B: NewB, L: loc::MemRegionVal(ER), V: *VI);
2696 }
2697
2698 // If the init list is shorter than the array length (or the array has
2699 // variable length), set the array default value. Values that are already set
2700 // are not overwritten.
2701 if (!Size || i < *Size)
2702 NewB = setImplicitDefaultValue(B: NewB, R, T: ElementTy);
2703
2704 return NewB;
2705}
2706
2707LimitedRegionBindingsRef
2708RegionStoreManager::bindVector(LimitedRegionBindingsConstRef B,
2709 const TypedValueRegion *R, SVal V) {
2710 llvm::TimeTraceScope TimeScope("RegionStoreManager::bindVector",
2711 [R]() { return R->getDescriptiveName(); });
2712 if (B.hasExhaustedBindingLimit())
2713 return B.withValuesEscaped(V);
2714
2715 QualType T = R->getValueType();
2716 const VectorType *VT = T->castAs<VectorType>(); // Use castAs for typedefs.
2717
2718 // Handle lazy compound values and symbolic values.
2719 if (isa<nonloc::LazyCompoundVal, nonloc::SymbolVal>(Val: V))
2720 return bindAggregate(B, R, DefaultVal: V);
2721
2722 // We may get non-CompoundVal accidentally due to imprecise cast logic or
2723 // that we are binding symbolic struct value. Kill the field values, and if
2724 // the value is symbolic go and bind it as a "default" binding.
2725 if (!isa<nonloc::CompoundVal>(Val: V)) {
2726 return bindAggregate(B, R, DefaultVal: UnknownVal());
2727 }
2728
2729 QualType ElemType = VT->getElementType();
2730 nonloc::CompoundVal CV = V.castAs<nonloc::CompoundVal>();
2731 nonloc::CompoundVal::iterator VI = CV.begin(), VE = CV.end();
2732 unsigned index = 0, numElements = VT->getNumElements();
2733 LimitedRegionBindingsRef NewB = B;
2734
2735 for ( ; index != numElements ; ++index) {
2736 if (VI == VE)
2737 break;
2738
2739 if (NewB.hasExhaustedBindingLimit())
2740 return NewB.withValuesEscaped(Begin: VI, End: VE);
2741
2742 NonLoc Idx = svalBuilder.makeArrayIndex(idx: index);
2743 const ElementRegion *ER = MRMgr.getElementRegion(elementType: ElemType, Idx, superRegion: R, Ctx);
2744
2745 if (ElemType->isArrayType())
2746 NewB = bindArray(B: NewB, R: ER, Init: *VI);
2747 else if (ElemType->isStructureOrClassType())
2748 NewB = bindStruct(B: NewB, R: ER, V: *VI);
2749 else
2750 NewB = bind(B: NewB, L: loc::MemRegionVal(ER), V: *VI);
2751 }
2752 return NewB;
2753}
2754
/// Attempt to eagerly copy a small struct of scalar fields, field by field,
/// instead of installing a LazyCompoundVal. Returns std::nullopt when the
/// record is unsuitable (has base classes, too many fields, or any
/// non-scalar field), in which case the caller falls back to a lazy binding.
2755std::optional<LimitedRegionBindingsRef> RegionStoreManager::tryBindSmallStruct(
2756 LimitedRegionBindingsConstRef B, const TypedValueRegion *R,
2757 const RecordDecl *RD, nonloc::LazyCompoundVal LCV) {
2758 if (B.hasExhaustedBindingLimit())
2759 return B.withValuesEscaped(V: LCV);
2760
2761 FieldVector Fields;
2762
 // Records with (virtual) base classes are not handled eagerly.
2763 if (const CXXRecordDecl *Class = dyn_cast<CXXRecordDecl>(Val: RD))
2764 if (Class->getNumBases() != 0 || Class->getNumVBases() != 0)
2765 return std::nullopt;
2766
2767 for (const auto *FD : RD->fields()) {
 // Unnamed bitfields have no storage of interest.
2768 if (FD->isUnnamedBitField())
2769 continue;
2770
2771 // If there are too many fields, or if any of the fields are aggregates,
2772 // just use the LCV as a default binding.
2773 if (Fields.size() == SmallStructLimit)
2774 return std::nullopt;
2775
2776 QualType Ty = FD->getType();
2777
2778 // Zero length arrays are basically no-ops, so we also ignore them here.
2779 if (Ty->isConstantArrayType() &&
2780 Ctx.getConstantArrayElementCount(CA: Ctx.getAsConstantArrayType(T: Ty)) == 0)
2781 continue;
2782
2783 if (!(Ty->isScalarType() || Ty->isReferenceType()))
2784 return std::nullopt;
2785
2786 Fields.push_back(Elt: FD);
2787 }
2788
2789 LimitedRegionBindingsRef NewB = B;
2790
 // Copy each collected field from the LCV's snapshotted store into the
 // destination region.
2791 for (const FieldDecl *Field : Fields) {
2792 const FieldRegion *SourceFR = MRMgr.getFieldRegion(FD: Field, SuperRegion: LCV.getRegion());
2793 SVal V = getBindingForField(B: getRegionBindings(store: LCV.getStore()), R: SourceFR);
2794
2795 const FieldRegion *DestFR = MRMgr.getFieldRegion(FD: Field, SuperRegion: R);
2796 NewB = bind(B: NewB, L: loc::MemRegionVal(DestFR), V);
2797 }
2798
2799 return NewB;
2800}
2801
/// Bind \p V to struct/class region \p R. Lazy copies try the small-struct
/// fast path first; symbolic/unknown values become a default aggregate
/// binding; explicit CompoundVals are unpacked to base classes (C++17
/// aggregates) and then to fields, in initializer order.
2802LimitedRegionBindingsRef
2803RegionStoreManager::bindStruct(LimitedRegionBindingsConstRef B,
2804 const TypedValueRegion *R, SVal V) {
2805 llvm::TimeTraceScope TimeScope("RegionStoreManager::bindStruct",
2806 [R]() { return R->getDescriptiveName(); });
2807 if (B.hasExhaustedBindingLimit())
2808 return B.withValuesEscaped(V);
2809
2810 QualType T = R->getValueType();
2811 assert(T->isStructureOrClassType());
2812
 // Nothing to do for records without a complete definition.
2813 const auto *RD = T->castAsRecordDecl();
2814 if (!RD->isCompleteDefinition())
2815 return B;
2816
2817 // Handle lazy compound values and symbolic values.
2818 if (std::optional<nonloc::LazyCompoundVal> LCV =
2819 V.getAs<nonloc::LazyCompoundVal>()) {
2820 if (std::optional NewB = tryBindSmallStruct(B, R, RD, LCV: *LCV))
2821 return *NewB;
2822 return bindAggregate(B, R, DefaultVal: V);
2823 }
2824 if (isa<nonloc::SymbolVal>(Val: V))
2825 return bindAggregate(B, R, DefaultVal: V);
2826
2827 // We may get non-CompoundVal accidentally due to imprecise cast logic or
2828 // that we are binding symbolic struct value. Kill the field values, and if
2829 // the value is symbolic go and bind it as a "default" binding.
2830 if (V.isUnknown() || !isa<nonloc::CompoundVal>(Val: V))
2831 return bindAggregate(B, R, DefaultVal: UnknownVal());
2832
2833 // The raw CompoundVal is essentially a symbolic InitListExpr: an (immutable)
2834 // list of other values. It appears pretty much only when there's an actual
2835 // initializer list expression in the program, and the analyzer tries to
2836 // unwrap it as soon as possible.
2837 // This code is where such unwrap happens: when the compound value is put into
2838 // the object that it was supposed to initialize (it's an *initializer* list,
2839 // after all), instead of binding the whole value to the whole object, we bind
2840 // sub-values to sub-objects. Sub-values may themselves be compound values,
2841 // and in this case the procedure becomes recursive.
2842 // FIXME: The annoying part about compound values is that they don't carry
2843 // any sort of information about which value corresponds to which sub-object.
2844 // It's simply a list of values in the middle of nowhere; we expect to match
2845 // them to sub-objects, essentially, "by index": first value binds to
2846 // the first field, second value binds to the second field, etc.
2847 // It would have been much safer to organize non-lazy compound values as
2848 // a mapping from fields/bases to values.
2849 const nonloc::CompoundVal& CV = V.castAs<nonloc::CompoundVal>();
2850 nonloc::CompoundVal::iterator VI = CV.begin(), VE = CV.end();
2851
2852 LimitedRegionBindingsRef NewB = B;
2853
2854 // In C++17 aggregates may have base classes, handle those as well.
2855 // They appear before fields in the initializer list / compound value.
2856 if (const auto *CRD = dyn_cast<CXXRecordDecl>(Val: RD)) {
2857 // If the object was constructed with a constructor, its value is a
2858 // LazyCompoundVal. If it's a raw CompoundVal, it means that we're
2859 // performing aggregate initialization. The only exception from this
2860 // rule is sending an Objective-C++ message that returns a C++ object
2861 // to a nil receiver; in this case the semantics is to return a
2862 // zero-initialized object even if it's a C++ object that doesn't have
2863 // this sort of constructor; the CompoundVal is empty in this case.
2864 assert((CRD->isAggregate() || (Ctx.getLangOpts().ObjC && VI == VE)) &&
2865 "Non-aggregates are constructed with a constructor!");
2866
2867 for (const auto &B : CRD->bases()) {
2868 // (Multiple inheritance is fine though.)
2869 assert(!B.isVirtual() && "Aggregates cannot have virtual base classes!");
2870
 // The initializer list may stop short of covering all bases.
2871 if (VI == VE)
2872 break;
2873 if (NewB.hasExhaustedBindingLimit())
2874 return NewB.withValuesEscaped(Begin: VI, End: VE);
2875
2876 QualType BTy = B.getType();
2877 assert(BTy->isStructureOrClassType() && "Base classes must be classes!");
2878
2879 const CXXRecordDecl *BRD = BTy->getAsCXXRecordDecl();
2880 assert(BRD && "Base classes must be C++ classes!");
2881
2882 const CXXBaseObjectRegion *BR =
2883 MRMgr.getCXXBaseObjectRegion(BaseClass: BRD, Super: R, /*IsVirtual=*/false);
2884
 // Each base subobject is itself a struct; recurse.
2885 NewB = bindStruct(B: NewB, R: BR, V: *VI);
2886
2887 ++VI;
2888 }
2889 }
2890
2891 RecordDecl::field_iterator FI, FE;
2892
 // Bind remaining initializer values to the fields, in declaration order.
2893 for (FI = RD->field_begin(), FE = RD->field_end(); FI != FE; ++FI) {
2894
2895 if (VI == VE)
2896 break;
2897
2898 if (NewB.hasExhaustedBindingLimit())
2899 return NewB.withValuesEscaped(Begin: VI, End: VE);
2900
2901 // Skip any unnamed bitfields to stay in sync with the initializers.
2902 if (FI->isUnnamedBitField())
2903 continue;
2904
2905 QualType FTy = FI->getType();
2906 const FieldRegion* FR = MRMgr.getFieldRegion(FD: *FI, SuperRegion: R);
2907
2908 if (FTy->isArrayType())
2909 NewB = bindArray(B: NewB, R: FR, Init: *VI);
2910 else if (FTy->isStructureOrClassType())
2911 NewB = bindStruct(B: NewB, R: FR, V: *VI);
2912 else
2913 NewB = bind(B: NewB, L: loc::MemRegionVal(FR), V: *VI);
2914 ++VI;
2915 }
2916
2917 if (NewB.hasExhaustedBindingLimit())
2918 return NewB.withValuesEscaped(Begin: VI, End: VE);
2919
2920 // There may be fewer values in the initialize list than the fields of struct.
 // Uncovered fields get a zero default binding on the whole region.
2921 if (FI != FE) {
2922 NewB = NewB.addBinding(R, k: BindingKey::Default,
2923 V: svalBuilder.makeIntVal(integer: 0, isUnsigned: false));
2924 }
2925
2926 return NewB;
2927}
2928
2929LimitedRegionBindingsRef
2930RegionStoreManager::bindAggregate(LimitedRegionBindingsConstRef B,
2931 const TypedRegion *R, SVal Val) {
2932 llvm::TimeTraceScope TimeScope("RegionStoreManager::bindAggregate",
2933 [R]() { return R->getDescriptiveName(); });
2934 if (B.hasExhaustedBindingLimit())
2935 return B.withValuesEscaped(V: Val);
2936
2937 // Remove the old bindings, using 'R' as the root of all regions
2938 // we will invalidate. Then add the new binding.
2939 return removeSubRegionBindings(B, Top: R).addBinding(R, k: BindingKey::Default, V: Val);
2940}
2941
2942//===----------------------------------------------------------------------===//
2943// State pruning.
2944//===----------------------------------------------------------------------===//
2945
2946namespace {
/// Cluster-analysis worker that marks store clusters reachable from live
/// roots; removeDeadBindings() drops every cluster this worker never visits.
2947class RemoveDeadBindingsWorker
2948 : public ClusterAnalysis<RemoveDeadBindingsWorker> {
 // SymbolicRegions whose symbol was not (yet) live when first seen; they are
 // re-examined by UpdatePostponed() after each worklist pass.
2949 SmallVector<const SymbolicRegion *, 12> Postponed;
2950 SymbolReaper &SymReaper;
 // Stack frame the analysis is currently in; used to decide which
 // CXXThisRegions are still live.
2951 const StackFrameContext *CurrentLCtx;
2952
2953public:
2954 RemoveDeadBindingsWorker(RegionStoreManager &rm,
2955 ProgramStateManager &stateMgr,
2956 RegionBindingsRef b, SymbolReaper &symReaper,
2957 const StackFrameContext *LCtx)
2958 : ClusterAnalysis<RemoveDeadBindingsWorker>(rm, stateMgr, b),
2959 SymReaper(symReaper), CurrentLCtx(LCtx) {}
2960
2961 // Called by ClusterAnalysis.
2962 void VisitAddedToCluster(const MemRegion *baseR, const ClusterBindings &C);
2963 void VisitCluster(const MemRegion *baseR, const ClusterBindings *C);
2964 using ClusterAnalysis<RemoveDeadBindingsWorker>::VisitCluster;
2965
2966 using ClusterAnalysis::AddToWorkList;
2967
 // Enqueue the base-region cluster of R for visiting.
2968 bool AddToWorkList(const MemRegion *R);
2969
 // Re-check Postponed symbolic regions; returns true if any became live.
2970 bool UpdatePostponed();
 // Mark regions/symbols referenced by a bound value as live.
2971 void VisitBinding(SVal V);
2972};
2973}
2974
2975bool RemoveDeadBindingsWorker::AddToWorkList(const MemRegion *R) {
2976 const MemRegion *BaseR = R->getBaseRegion();
2977 return AddToWorkList(E: WorkListElement(BaseR), C: getCluster(R: BaseR));
2978}
2979
/// Decide whether a newly discovered cluster is rooted in something live.
/// Live roots are enqueued for a full visit; symbolic regions whose symbol
/// is not yet known to be live are postponed for re-checking.
2980void RemoveDeadBindingsWorker::VisitAddedToCluster(const MemRegion *baseR,
2981 const ClusterBindings &C) {
2982
 // Local variables: live iff the SymbolReaper says the variable is live.
2983 if (const VarRegion *VR = dyn_cast<VarRegion>(Val: baseR)) {
2984 if (SymReaper.isLive(VR))
2985 AddToWorkList(E: baseR, C: &C);
2986
2987 return;
2988 }
2989
 // Symbolic regions: live iff their symbol is live; otherwise postpone,
 // since the symbol may become live later in this scan.
2990 if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(Val: baseR)) {
2991 if (SymReaper.isLive(sym: SR->getSymbol()))
2992 AddToWorkList(E: SR, C: &C);
2993 else
2994 Postponed.push_back(Elt: SR);
2995
2996 return;
2997 }
2998
 // Non-static globals are always considered live roots.
2999 if (isa<NonStaticGlobalSpaceRegion>(Val: baseR)) {
3000 AddToWorkList(E: baseR, C: &C);
3001 return;
3002 }
3003
3004 // CXXThisRegion in the current or parent location context is live.
3005 if (const CXXThisRegion *TR = dyn_cast<CXXThisRegion>(Val: baseR)) {
3006 const auto *StackReg =
3007 cast<StackArgumentsSpaceRegion>(Val: TR->getSuperRegion());
3008 const StackFrameContext *RegCtx = StackReg->getStackFrame();
3009 if (CurrentLCtx &&
3010 (RegCtx == CurrentLCtx || RegCtx->isParentOf(LC: CurrentLCtx)))
3011 AddToWorkList(E: TR, C: &C);
3012 }
3013}
3014
/// Visit a live cluster: keep its root symbol alive and propagate liveness
/// through every binding it contains.
3015void RemoveDeadBindingsWorker::VisitCluster(const MemRegion *baseR,
3016 const ClusterBindings *C) {
3017 if (!C)
3018 return;
3019
3020 // Mark the symbol for any SymbolicRegion with live bindings as live itself.
3021 // This means we should continue to track that symbol.
3022 if (const SymbolicRegion *SymR = dyn_cast<SymbolicRegion>(Val: baseR))
3023 SymReaper.markLive(sym: SymR->getSymbol());
3024
3025 for (const auto &[Key, Val] : *C) {
3026 // Element index of a binding key is live.
3027 SymReaper.markElementIndicesLive(region: Key.getRegion());
3028
3029 VisitBinding(V: Val);
3030 }
3031}
3032
/// Propagate liveness through one bound value: lazily copied regions, value
/// regions (plus block captures), and all symbols occurring in the value.
3033void RemoveDeadBindingsWorker::VisitBinding(SVal V) {
3034 // Is it a LazyCompoundVal? All referenced regions are live as well.
3035 // The LazyCompoundVal itself is not live but should be readable.
3036 if (auto LCS = V.getAs<nonloc::LazyCompoundVal>()) {
3037 SymReaper.markLazilyCopied(region: LCS->getRegion());
3038
 // Walk the interesting values reachable through the snapshot; nested
 // lazy copies are only marked, everything else is visited recursively.
3039 for (SVal V : RM.getInterestingValues(LCV: *LCS)) {
3040 if (auto DepLCS = V.getAs<nonloc::LazyCompoundVal>())
3041 SymReaper.markLazilyCopied(region: DepLCS->getRegion());
3042 else
3043 VisitBinding(V);
3044 }
3045
3046 return;
3047 }
3048
3049 // If V is a region, then add it to the worklist.
3050 if (const MemRegion *R = V.getAsRegion()) {
3051 AddToWorkList(R);
3052 SymReaper.markLive(region: R);
3053
3054 // All regions captured by a block are also live.
3055 if (const BlockDataRegion *BR = dyn_cast<BlockDataRegion>(Val: R)) {
3056 for (auto Var : BR->referenced_vars())
3057 AddToWorkList(R: Var.getCapturedRegion());
3058 }
3059 }
3060
3061
3062 // Update the set of live symbols.
3063 for (SymbolRef Sym : V.symbols())
3064 SymReaper.markLive(sym: Sym);
3065}
3066
3067bool RemoveDeadBindingsWorker::UpdatePostponed() {
3068 // See if any postponed SymbolicRegions are actually live now, after
3069 // having done a scan.
3070 bool Changed = false;
3071
3072 for (const SymbolicRegion *SR : Postponed) {
3073 if (SymReaper.isLive(sym: SR->getSymbol())) {
3074 Changed |= AddToWorkList(R: SR);
3075 SR = nullptr;
3076 }
3077 }
3078
3079 return Changed;
3080}
3081
/// Drop all store clusters that are unreachable from the SymbolReaper's
/// live roots. Runs the RemoveDeadBindingsWorker to a fixed point (the
/// postponed symbolic regions may become live on later passes), then erases
/// every cluster the worker never visited.
3082StoreRef RegionStoreManager::removeDeadBindings(Store store,
3083 const StackFrameContext *LCtx,
3084 SymbolReaper& SymReaper) {
3085 RegionBindingsRef B = getRegionBindings(store);
3086 RemoveDeadBindingsWorker W(*this, StateMgr, B, SymReaper, LCtx);
3087 W.GenerateClusters();
3088
3089 // Enqueue the region roots onto the worklist.
3090 for (const MemRegion *Reg : SymReaper.regions()) {
3091 W.AddToWorkList(R: Reg);
3092 }
3093
 // Iterate until no postponed symbolic region becomes live anymore.
3094 do W.RunWorkList(); while (W.UpdatePostponed());
3095
3096 // We have now scanned the store, marking reachable regions and symbols
3097 // as live. We now remove all the regions that are dead from the store
3098 // as well as update DSymbols with the set symbols that are now dead.
3099 for (const MemRegion *Base : llvm::make_first_range(c&: B)) {
3100 // If the cluster has been visited, we know the region has been marked.
3101 // Otherwise, remove the dead entry.
3102 if (!W.isVisited(R: Base))
3103 B = B.removeCluster(BaseRegion: Base);
3104 }
3105
3106 return StoreRef(B.asStore(), *this);
3107}
3108
3109//===----------------------------------------------------------------------===//
3110// Utility methods.
3111//===----------------------------------------------------------------------===//
3112
3113void RegionStoreManager::printJson(raw_ostream &Out, Store S, const char *NL,
3114 unsigned int Space, bool IsDot) const {
3115 RegionBindingsRef Bindings = getRegionBindings(store: S);
3116
3117 Indent(Out, Space, IsDot) << "\"store\": ";
3118
3119 if (Bindings.isEmpty()) {
3120 Out << "null," << NL;
3121 return;
3122 }
3123
3124 Out << "{ \"pointer\": \"" << Bindings.asStore() << "\", \"items\": [" << NL;
3125 Bindings.printJson(Out, NL, Space: Space + 1, IsDot);
3126 Indent(Out, Space, IsDot) << "]}," << NL;
3127}
3128