1 | //=== MallocChecker.cpp - A malloc/free checker -------------------*- C++ -*--// |
2 | // |
3 | // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. |
4 | // See https://llvm.org/LICENSE.txt for license information. |
5 | // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception |
6 | // |
7 | //===----------------------------------------------------------------------===// |
8 | // |
9 | // This file defines a variety of memory management related checkers, such as |
10 | // leak, double free, and use-after-free. |
11 | // |
12 | // The following checkers are defined here: |
13 | // |
//   * MallocChecker
//       Despite its name, it models all sorts of memory allocations and
//       de- or reallocation, including but not limited to malloc, free,
//       realloc, new, delete. It also reports on a variety of memory misuse
//       errors.
//       Many other checkers interact very closely with this checker; in fact,
//       most are merely options to this one. Other checkers may register
//       MallocChecker, but do not enable MallocChecker's reports (more details
//       to follow around its field, ChecksEnabled).
//       It also has a boolean "Optimistic" checker option, which if set to
//       true will cause the checker to model user-defined memory management
//       related functions annotated via the attributes ownership_takes,
//       ownership_holds and ownership_returns (see the example at the end of
//       this comment).
27 | // |
28 | // * NewDeleteChecker |
29 | // Enables the modeling of new, new[], delete, delete[] in MallocChecker, |
30 | // and checks for related double-free and use-after-free errors. |
31 | // |
32 | // * NewDeleteLeaksChecker |
33 | // Checks for leaks related to new, new[], delete, delete[]. |
34 | // Depends on NewDeleteChecker. |
35 | // |
36 | // * MismatchedDeallocatorChecker |
37 | // Enables checking whether memory is deallocated with the corresponding |
38 | // allocation function in MallocChecker, such as malloc() allocated |
39 | // regions are only freed by free(), new by delete, new[] by delete[]. |
40 | // |
// InnerPointerChecker interacts very closely with MallocChecker, but unlike
// the above checkers, it has its own file, hence the many InnerPointerChecker
// related headers and non-static functions.
44 | // |
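// A minimal sketch of the ownership annotations mentioned above (the
// attribute names are real; the functions themselves are hypothetical):
//
//   void __attribute((ownership_returns(my_alloc, 1))) *my_alloc(size_t);
//   void __attribute((ownership_takes(my_alloc, 1))) my_free(void *);
//
// With the "Optimistic" option enabled, memory returned by my_alloc() is
// tracked like malloc()'d memory and is expected to be released by my_free().
//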
45 | //===----------------------------------------------------------------------===// |
46 | |
47 | #include "AllocationState.h" |
48 | #include "InterCheckerAPI.h" |
49 | #include "NoOwnershipChangeVisitor.h" |
50 | #include "clang/AST/Attr.h" |
51 | #include "clang/AST/DeclCXX.h" |
52 | #include "clang/AST/DeclTemplate.h" |
53 | #include "clang/AST/Expr.h" |
54 | #include "clang/AST/ExprCXX.h" |
55 | #include "clang/AST/ParentMap.h" |
56 | #include "clang/ASTMatchers/ASTMatchFinder.h" |
57 | #include "clang/ASTMatchers/ASTMatchers.h" |
58 | #include "clang/Analysis/ProgramPoint.h" |
59 | #include "clang/Basic/LLVM.h" |
60 | #include "clang/Basic/SourceManager.h" |
61 | #include "clang/Basic/TargetInfo.h" |
62 | #include "clang/Lex/Lexer.h" |
63 | #include "clang/StaticAnalyzer/Checkers/BuiltinCheckerRegistration.h" |
64 | #include "clang/StaticAnalyzer/Checkers/Taint.h" |
65 | #include "clang/StaticAnalyzer/Core/BugReporter/BugType.h" |
66 | #include "clang/StaticAnalyzer/Core/BugReporter/CommonBugCategories.h" |
67 | #include "clang/StaticAnalyzer/Core/Checker.h" |
68 | #include "clang/StaticAnalyzer/Core/CheckerManager.h" |
69 | #include "clang/StaticAnalyzer/Core/PathSensitive/CallDescription.h" |
70 | #include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h" |
71 | #include "clang/StaticAnalyzer/Core/PathSensitive/CheckerContext.h" |
72 | #include "clang/StaticAnalyzer/Core/PathSensitive/CheckerHelpers.h" |
73 | #include "clang/StaticAnalyzer/Core/PathSensitive/DynamicExtent.h" |
74 | #include "clang/StaticAnalyzer/Core/PathSensitive/ExplodedGraph.h" |
75 | #include "clang/StaticAnalyzer/Core/PathSensitive/ProgramState.h" |
76 | #include "clang/StaticAnalyzer/Core/PathSensitive/ProgramStateTrait.h" |
77 | #include "clang/StaticAnalyzer/Core/PathSensitive/ProgramState_Fwd.h" |
78 | #include "clang/StaticAnalyzer/Core/PathSensitive/SVals.h" |
79 | #include "clang/StaticAnalyzer/Core/PathSensitive/SymbolManager.h" |
80 | #include "llvm/ADT/STLExtras.h" |
81 | #include "llvm/ADT/StringExtras.h" |
82 | #include "llvm/Support/Casting.h" |
83 | #include "llvm/Support/Compiler.h" |
84 | #include "llvm/Support/ErrorHandling.h" |
85 | #include "llvm/Support/raw_ostream.h" |
86 | #include <functional> |
87 | #include <optional> |
88 | #include <utility> |
89 | |
90 | using namespace clang; |
91 | using namespace ento; |
92 | using namespace std::placeholders; |
93 | |
//===----------------------------------------------------------------------===//
// The types of allocation we're modeling. This is used to check whether a
// dynamically allocated object is deallocated with the correct function, e.g.
// that operator delete is not used on an object created by malloc(), and that
// alloca regions are never deallocated manually.
//===----------------------------------------------------------------------===//
100 | |
101 | namespace { |
102 | |
103 | // Used to check correspondence between allocators and deallocators. |
104 | enum AllocationFamilyKind { |
105 | AF_None, |
106 | AF_Malloc, |
107 | AF_CXXNew, |
108 | AF_CXXNewArray, |
109 | AF_IfNameIndex, |
110 | AF_Alloca, |
111 | AF_InnerBuffer, |
112 | AF_Custom, |
113 | }; |
114 | |
115 | struct AllocationFamily { |
116 | AllocationFamilyKind Kind; |
117 | std::optional<StringRef> CustomName; |
118 | |
  explicit AllocationFamily(AllocationFamilyKind AKind,
                            std::optional<StringRef> Name = std::nullopt)
      : Kind(AKind), CustomName(Name) {
    assert((Kind != AF_Custom || CustomName.has_value()) &&
           "Custom family must also specify the name");

    // Preserve previous behavior when the "malloc" class means AF_Malloc
    if (Kind == AF_Custom && CustomName.value() == "malloc") {
      Kind = AF_Malloc;
      CustomName = std::nullopt;
    }
  }
131 | |
  bool operator==(const AllocationFamily &Other) const {
    return std::tie(Kind, CustomName) ==
           std::tie(Other.Kind, Other.CustomName);
  }
135 | |
136 | bool operator!=(const AllocationFamily &Other) const { |
137 | return !(*this == Other); |
138 | } |
139 | |
  void Profile(llvm::FoldingSetNodeID &ID) const {
    ID.AddInteger(Kind);

    if (Kind == AF_Custom)
      ID.AddString(CustomName.value());
  }
146 | }; |
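
// A rough sketch of how the family is typically derived from an ownership
// annotation ("my_pool" is a hypothetical class name used for illustration):
//
//   __attribute((ownership_returns(malloc, 1)))  -> AF_Malloc
//   __attribute((ownership_returns(my_pool, 1))) -> AF_Custom with
//                                                   CustomName == "my_pool"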
147 | |
148 | } // end of anonymous namespace |
149 | |
150 | /// Print names of allocators and deallocators. |
151 | /// |
152 | /// \returns true on success. |
153 | static bool printMemFnName(raw_ostream &os, CheckerContext &C, const Expr *E); |
154 | |
155 | /// Print expected name of an allocator based on the deallocator's family |
156 | /// derived from the DeallocExpr. |
157 | static void printExpectedAllocName(raw_ostream &os, AllocationFamily Family); |
158 | |
159 | /// Print expected name of a deallocator based on the allocator's |
160 | /// family. |
161 | static void printExpectedDeallocName(raw_ostream &os, AllocationFamily Family); |
162 | |
163 | //===----------------------------------------------------------------------===// |
164 | // The state of a symbol, in terms of memory management. |
165 | //===----------------------------------------------------------------------===// |
166 | |
167 | namespace { |
168 | |
169 | class RefState { |
170 | enum Kind { |
171 | // Reference to allocated memory. |
172 | Allocated, |
173 | // Reference to zero-allocated memory. |
174 | AllocatedOfSizeZero, |
175 | // Reference to released/freed memory. |
176 | Released, |
177 | // The responsibility for freeing resources has transferred from |
178 | // this reference. A relinquished symbol should not be freed. |
179 | Relinquished, |
180 | // We are no longer guaranteed to have observed all manipulations |
181 | // of this pointer/memory. For example, it could have been |
182 | // passed as a parameter to an opaque function. |
183 | Escaped |
184 | }; |
185 | |
186 | const Stmt *S; |
187 | |
188 | Kind K; |
189 | AllocationFamily Family; |
190 | |
191 | RefState(Kind k, const Stmt *s, AllocationFamily family) |
192 | : S(s), K(k), Family(family) { |
193 | assert(family.Kind != AF_None); |
194 | } |
195 | |
196 | public: |
197 | bool isAllocated() const { return K == Allocated; } |
198 | bool isAllocatedOfSizeZero() const { return K == AllocatedOfSizeZero; } |
199 | bool isReleased() const { return K == Released; } |
200 | bool isRelinquished() const { return K == Relinquished; } |
201 | bool isEscaped() const { return K == Escaped; } |
202 | AllocationFamily getAllocationFamily() const { return Family; } |
203 | const Stmt *getStmt() const { return S; } |
204 | |
205 | bool operator==(const RefState &X) const { |
206 | return K == X.K && S == X.S && Family == X.Family; |
207 | } |
208 | |
209 | static RefState getAllocated(AllocationFamily family, const Stmt *s) { |
210 | return RefState(Allocated, s, family); |
211 | } |
212 | static RefState getAllocatedOfSizeZero(const RefState *RS) { |
213 | return RefState(AllocatedOfSizeZero, RS->getStmt(), |
214 | RS->getAllocationFamily()); |
215 | } |
216 | static RefState getReleased(AllocationFamily family, const Stmt *s) { |
217 | return RefState(Released, s, family); |
218 | } |
219 | static RefState getRelinquished(AllocationFamily family, const Stmt *s) { |
220 | return RefState(Relinquished, s, family); |
221 | } |
222 | static RefState getEscaped(const RefState *RS) { |
223 | return RefState(Escaped, RS->getStmt(), RS->getAllocationFamily()); |
224 | } |
225 | |
  void Profile(llvm::FoldingSetNodeID &ID) const {
    ID.AddInteger(K);
    ID.AddPointer(S);
    Family.Profile(ID);
  }
231 | |
232 | LLVM_DUMP_METHOD void dump(raw_ostream &OS) const { |
233 | switch (K) { |
234 | #define CASE(ID) case ID: OS << #ID; break; |
235 | CASE(Allocated) |
236 | CASE(AllocatedOfSizeZero) |
237 | CASE(Released) |
238 | CASE(Relinquished) |
239 | CASE(Escaped) |
240 | } |
241 | } |
242 | |
  LLVM_DUMP_METHOD void dump() const { dump(llvm::errs()); }
244 | }; |
245 | |
246 | } // end of anonymous namespace |
247 | |
248 | REGISTER_MAP_WITH_PROGRAMSTATE(RegionState, SymbolRef, RefState) |
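
// A rough sketch of how this map is typically queried and updated later in
// this file (Sym is a SymbolRef, State is a ProgramStateRef):
//
//   const RefState *RS = State->get<RegionState>(Sym);   // look up a symbol
//   State = State->set<RegionState>(Sym, RefState::getReleased(Family, S));
//   State = State->remove<RegionState>(Sym);             // stop tracking it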
249 | |
250 | /// Check if the memory associated with this symbol was released. |
251 | static bool isReleased(SymbolRef Sym, CheckerContext &C); |
252 | |
253 | /// Update the RefState to reflect the new memory allocation. |
254 | /// The optional \p RetVal parameter specifies the newly allocated pointer |
255 | /// value; if unspecified, the value of expression \p E is used. |
256 | static ProgramStateRef |
257 | MallocUpdateRefState(CheckerContext &C, const Expr *E, ProgramStateRef State, |
258 | AllocationFamily Family, |
259 | std::optional<SVal> RetVal = std::nullopt); |
260 | |
261 | //===----------------------------------------------------------------------===// |
262 | // The modeling of memory reallocation. |
263 | // |
264 | // The terminology 'toPtr' and 'fromPtr' will be used: |
265 | // toPtr = realloc(fromPtr, 20); |
266 | //===----------------------------------------------------------------------===// |
267 | |
268 | REGISTER_SET_WITH_PROGRAMSTATE(ReallocSizeZeroSymbols, SymbolRef) |
269 | |
270 | namespace { |
271 | |
272 | /// The state of 'fromPtr' after reallocation is known to have failed. |
273 | enum OwnershipAfterReallocKind { |
274 | // The symbol needs to be freed (e.g.: realloc) |
275 | OAR_ToBeFreedAfterFailure, |
276 | // The symbol has been freed (e.g.: reallocf) |
277 | OAR_FreeOnFailure, |
  // The symbol doesn't have to be freed (e.g.: we aren't sure if, how and where
279 | // 'fromPtr' was allocated: |
280 | // void Haha(int *ptr) { |
281 | // ptr = realloc(ptr, 67); |
282 | // // ... |
283 | // } |
284 | // ). |
285 | OAR_DoNotTrackAfterFailure |
286 | }; |
287 | |
288 | /// Stores information about the 'fromPtr' symbol after reallocation. |
289 | /// |
290 | /// This is important because realloc may fail, and that needs special modeling. |
291 | /// Whether reallocation failed or not will not be known until later, so we'll |
292 | /// store whether upon failure 'fromPtr' will be freed, or needs to be freed |
293 | /// later, etc. |
294 | struct ReallocPair { |
295 | |
296 | // The 'fromPtr'. |
297 | SymbolRef ReallocatedSym; |
298 | OwnershipAfterReallocKind Kind; |
299 | |
300 | ReallocPair(SymbolRef S, OwnershipAfterReallocKind K) |
301 | : ReallocatedSym(S), Kind(K) {} |
  void Profile(llvm::FoldingSetNodeID &ID) const {
    ID.AddInteger(Kind);
    ID.AddPointer(ReallocatedSym);
  }
306 | bool operator==(const ReallocPair &X) const { |
307 | return ReallocatedSym == X.ReallocatedSym && |
308 | Kind == X.Kind; |
309 | } |
310 | }; |
311 | |
312 | } // end of anonymous namespace |
313 | |
314 | REGISTER_MAP_WITH_PROGRAMSTATE(ReallocPairs, SymbolRef, ReallocPair) |
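
// A rough sketch of the bookkeeping (not the exact code): after
//
//   toPtr = realloc(fromPtr, 20);
//
// the map above associates the symbol of 'toPtr' with a ReallocPair that
// records the symbol of 'fromPtr' and its OwnershipAfterReallocKind, so that
// evalAssume() can restore 'fromPtr' to the appropriate state once the
// reallocation is known to have failed.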
315 | |
static bool isStandardNew(const FunctionDecl *FD);
static bool isStandardNew(const CallEvent &Call) {
  if (!Call.getDecl() || !isa<FunctionDecl>(Call.getDecl()))
    return false;
  return isStandardNew(cast<FunctionDecl>(Call.getDecl()));
}
322 | |
static bool isStandardDelete(const FunctionDecl *FD);
static bool isStandardDelete(const CallEvent &Call) {
  if (!Call.getDecl() || !isa<FunctionDecl>(Call.getDecl()))
    return false;
  return isStandardDelete(cast<FunctionDecl>(Call.getDecl()));
}
329 | |
330 | /// Tells if the callee is one of the builtin new/delete operators, including |
331 | /// placement operators and other standard overloads. |
332 | template <typename T> static bool isStandardNewDelete(const T &FD) { |
333 | return isStandardDelete(FD) || isStandardNew(FD); |
334 | } |
335 | |
336 | //===----------------------------------------------------------------------===// |
337 | // Definition of the MallocChecker class. |
338 | //===----------------------------------------------------------------------===// |
339 | |
340 | namespace { |
341 | |
342 | class MallocChecker |
343 | : public Checker<check::DeadSymbols, check::PointerEscape, |
344 | check::ConstPointerEscape, check::PreStmt<ReturnStmt>, |
345 | check::EndFunction, check::PreCall, check::PostCall, |
346 | eval::Call, check::NewAllocator, |
347 | check::PostStmt<BlockExpr>, check::PostObjCMessage, |
348 | check::Location, eval::Assume> { |
349 | public: |
350 | /// In pessimistic mode, the checker assumes that it does not know which |
351 | /// functions might free the memory. |
352 | /// In optimistic mode, the checker assumes that all user-defined functions |
353 | /// which might free a pointer are annotated. |
354 | bool ShouldIncludeOwnershipAnnotatedFunctions = false; |
355 | |
356 | bool ShouldRegisterNoOwnershipChangeVisitor = false; |
357 | |
358 | /// Many checkers are essentially built into this one, so enabling them will |
359 | /// make MallocChecker perform additional modeling and reporting. |
360 | enum CheckKind { |
    /// When a subchecker is enabled but MallocChecker isn't, model memory
    /// management but do not emit the warnings that are only reported when
    /// MallocChecker itself is enabled.
364 | CK_MallocChecker, |
365 | CK_NewDeleteChecker, |
366 | CK_NewDeleteLeaksChecker, |
367 | CK_MismatchedDeallocatorChecker, |
368 | CK_InnerPointerChecker, |
369 | CK_TaintedAllocChecker, |
370 | CK_NumCheckKinds |
371 | }; |
372 | |
373 | using LeakInfo = std::pair<const ExplodedNode *, const MemRegion *>; |
374 | |
375 | bool ChecksEnabled[CK_NumCheckKinds] = {false}; |
376 | CheckerNameRef CheckNames[CK_NumCheckKinds]; |
377 | |
378 | void checkPreCall(const CallEvent &Call, CheckerContext &C) const; |
379 | void checkPostCall(const CallEvent &Call, CheckerContext &C) const; |
380 | bool evalCall(const CallEvent &Call, CheckerContext &C) const; |
381 | void checkNewAllocator(const CXXAllocatorCall &Call, CheckerContext &C) const; |
382 | void checkPostObjCMessage(const ObjCMethodCall &Call, CheckerContext &C) const; |
383 | void checkPostStmt(const BlockExpr *BE, CheckerContext &C) const; |
384 | void checkDeadSymbols(SymbolReaper &SymReaper, CheckerContext &C) const; |
385 | void checkPreStmt(const ReturnStmt *S, CheckerContext &C) const; |
386 | void checkEndFunction(const ReturnStmt *S, CheckerContext &C) const; |
387 | ProgramStateRef evalAssume(ProgramStateRef state, SVal Cond, |
388 | bool Assumption) const; |
389 | void checkLocation(SVal l, bool isLoad, const Stmt *S, |
390 | CheckerContext &C) const; |
391 | |
392 | ProgramStateRef checkPointerEscape(ProgramStateRef State, |
393 | const InvalidatedSymbols &Escaped, |
394 | const CallEvent *Call, |
395 | PointerEscapeKind Kind) const; |
396 | ProgramStateRef checkConstPointerEscape(ProgramStateRef State, |
397 | const InvalidatedSymbols &Escaped, |
398 | const CallEvent *Call, |
399 | PointerEscapeKind Kind) const; |
400 | |
401 | void printState(raw_ostream &Out, ProgramStateRef State, |
402 | const char *NL, const char *Sep) const override; |
403 | |
404 | private: |
405 | mutable std::unique_ptr<BugType> BT_DoubleFree[CK_NumCheckKinds]; |
406 | mutable std::unique_ptr<BugType> BT_DoubleDelete; |
407 | mutable std::unique_ptr<BugType> BT_Leak[CK_NumCheckKinds]; |
408 | mutable std::unique_ptr<BugType> BT_UseFree[CK_NumCheckKinds]; |
409 | mutable std::unique_ptr<BugType> BT_BadFree[CK_NumCheckKinds]; |
410 | mutable std::unique_ptr<BugType> BT_FreeAlloca[CK_NumCheckKinds]; |
411 | mutable std::unique_ptr<BugType> BT_MismatchedDealloc; |
412 | mutable std::unique_ptr<BugType> BT_OffsetFree[CK_NumCheckKinds]; |
413 | mutable std::unique_ptr<BugType> BT_UseZerroAllocated[CK_NumCheckKinds]; |
414 | mutable std::unique_ptr<BugType> BT_TaintedAlloc; |
415 | |
416 | #define CHECK_FN(NAME) \ |
417 | void NAME(ProgramStateRef State, const CallEvent &Call, CheckerContext &C) \ |
418 | const; |
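
  // For instance, CHECK_FN(checkFree) below expands to the declaration
  //   void checkFree(ProgramStateRef State, const CallEvent &Call,
  //                  CheckerContext &C) const;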
419 | |
420 | CHECK_FN(checkFree) |
421 | CHECK_FN(checkIfNameIndex) |
422 | CHECK_FN(checkBasicAlloc) |
423 | CHECK_FN(checkKernelMalloc) |
424 | CHECK_FN(checkCalloc) |
425 | CHECK_FN(checkAlloca) |
426 | CHECK_FN(checkStrdup) |
427 | CHECK_FN(checkIfFreeNameIndex) |
428 | CHECK_FN(checkCXXNewOrCXXDelete) |
429 | CHECK_FN(checkGMalloc0) |
430 | CHECK_FN(checkGMemdup) |
431 | CHECK_FN(checkGMallocN) |
432 | CHECK_FN(checkGMallocN0) |
433 | CHECK_FN(preGetDelimOrGetLine) |
434 | CHECK_FN(checkGetDelimOrGetLine) |
435 | CHECK_FN(checkReallocN) |
436 | CHECK_FN(checkOwnershipAttr) |
437 | |
438 | void checkRealloc(ProgramStateRef State, const CallEvent &Call, |
439 | CheckerContext &C, bool ShouldFreeOnFail) const; |
440 | |
441 | using CheckFn = |
442 | std::function<void(const MallocChecker *, ProgramStateRef State, |
443 | const CallEvent &Call, CheckerContext &C)>; |
444 | |
445 | const CallDescriptionMap<CheckFn> PreFnMap{ |
446 | // NOTE: the following CallDescription also matches the C++ standard |
447 | // library function std::getline(); the callback will filter it out. |
448 | {{CDM::CLibrary, {"getline" }, 3}, &MallocChecker::preGetDelimOrGetLine}, |
449 | {{CDM::CLibrary, {"getdelim" }, 4}, &MallocChecker::preGetDelimOrGetLine}, |
450 | }; |
451 | |
452 | const CallDescriptionMap<CheckFn> PostFnMap{ |
453 | // NOTE: the following CallDescription also matches the C++ standard |
454 | // library function std::getline(); the callback will filter it out. |
455 | {{CDM::CLibrary, {"getline" }, 3}, &MallocChecker::checkGetDelimOrGetLine}, |
456 | {{CDM::CLibrary, {"getdelim" }, 4}, |
457 | &MallocChecker::checkGetDelimOrGetLine}, |
458 | }; |
459 | |
460 | const CallDescriptionMap<CheckFn> FreeingMemFnMap{ |
461 | {{CDM::CLibrary, {"free" }, 1}, &MallocChecker::checkFree}, |
462 | {{CDM::CLibrary, {"if_freenameindex" }, 1}, |
463 | &MallocChecker::checkIfFreeNameIndex}, |
464 | {{CDM::CLibrary, {"kfree" }, 1}, &MallocChecker::checkFree}, |
465 | {{CDM::CLibrary, {"g_free" }, 1}, &MallocChecker::checkFree}, |
466 | }; |
467 | |
468 | bool isFreeingCall(const CallEvent &Call) const; |
469 | static bool isFreeingOwnershipAttrCall(const FunctionDecl *Func); |
470 | static bool isFreeingOwnershipAttrCall(const CallEvent &Call); |
471 | static bool isAllocatingOwnershipAttrCall(const FunctionDecl *Func); |
472 | static bool isAllocatingOwnershipAttrCall(const CallEvent &Call); |
473 | |
474 | friend class NoMemOwnershipChangeVisitor; |
475 | |
476 | CallDescriptionMap<CheckFn> AllocaMemFnMap{ |
477 | {{CDM::CLibrary, {"alloca" }, 1}, &MallocChecker::checkAlloca}, |
478 | {{CDM::CLibrary, {"_alloca" }, 1}, &MallocChecker::checkAlloca}, |
479 | // The line for "alloca" also covers "__builtin_alloca", but the |
480 | // _with_align variant must be listed separately because it takes an |
481 | // extra argument: |
482 | {{CDM::CLibrary, {"__builtin_alloca_with_align" }, 2}, |
483 | &MallocChecker::checkAlloca}, |
484 | }; |
485 | |
486 | CallDescriptionMap<CheckFn> AllocatingMemFnMap{ |
487 | {{CDM::CLibrary, {"malloc" }, 1}, &MallocChecker::checkBasicAlloc}, |
488 | {{CDM::CLibrary, {"malloc" }, 3}, &MallocChecker::checkKernelMalloc}, |
489 | {{CDM::CLibrary, {"calloc" }, 2}, &MallocChecker::checkCalloc}, |
490 | {{CDM::CLibrary, {"valloc" }, 1}, &MallocChecker::checkBasicAlloc}, |
491 | {{CDM::CLibrary, {"strndup" }, 2}, &MallocChecker::checkStrdup}, |
492 | {{CDM::CLibrary, {"strdup" }, 1}, &MallocChecker::checkStrdup}, |
493 | {{CDM::CLibrary, {"_strdup" }, 1}, &MallocChecker::checkStrdup}, |
494 | {{CDM::CLibrary, {"kmalloc" }, 2}, &MallocChecker::checkKernelMalloc}, |
495 | {{CDM::CLibrary, {"if_nameindex" }, 1}, &MallocChecker::checkIfNameIndex}, |
496 | {{CDM::CLibrary, {"wcsdup" }, 1}, &MallocChecker::checkStrdup}, |
497 | {{CDM::CLibrary, {"_wcsdup" }, 1}, &MallocChecker::checkStrdup}, |
498 | {{CDM::CLibrary, {"g_malloc" }, 1}, &MallocChecker::checkBasicAlloc}, |
499 | {{CDM::CLibrary, {"g_malloc0" }, 1}, &MallocChecker::checkGMalloc0}, |
500 | {{CDM::CLibrary, {"g_try_malloc" }, 1}, &MallocChecker::checkBasicAlloc}, |
501 | {{CDM::CLibrary, {"g_try_malloc0" }, 1}, &MallocChecker::checkGMalloc0}, |
502 | {{CDM::CLibrary, {"g_memdup" }, 2}, &MallocChecker::checkGMemdup}, |
503 | {{CDM::CLibrary, {"g_malloc_n" }, 2}, &MallocChecker::checkGMallocN}, |
504 | {{CDM::CLibrary, {"g_malloc0_n" }, 2}, &MallocChecker::checkGMallocN0}, |
505 | {{CDM::CLibrary, {"g_try_malloc_n" }, 2}, &MallocChecker::checkGMallocN}, |
506 | {{CDM::CLibrary, {"g_try_malloc0_n" }, 2}, &MallocChecker::checkGMallocN0}, |
507 | }; |
508 | |
  CallDescriptionMap<CheckFn> ReallocatingMemFnMap{
      {{CDM::CLibrary, {"realloc"}, 2},
       std::bind(&MallocChecker::checkRealloc, _1, _2, _3, _4, false)},
      {{CDM::CLibrary, {"reallocf"}, 2},
       std::bind(&MallocChecker::checkRealloc, _1, _2, _3, _4, true)},
      {{CDM::CLibrary, {"g_realloc"}, 2},
       std::bind(&MallocChecker::checkRealloc, _1, _2, _3, _4, false)},
      {{CDM::CLibrary, {"g_try_realloc"}, 2},
       std::bind(&MallocChecker::checkRealloc, _1, _2, _3, _4, false)},
      {{CDM::CLibrary, {"g_realloc_n"}, 3}, &MallocChecker::checkReallocN},
      {{CDM::CLibrary, {"g_try_realloc_n"}, 3}, &MallocChecker::checkReallocN},
  };
521 | |
522 | bool isMemCall(const CallEvent &Call) const; |
523 | bool hasOwnershipReturns(const CallEvent &Call) const; |
524 | bool hasOwnershipTakesHolds(const CallEvent &Call) const; |
525 | void reportTaintBug(StringRef Msg, ProgramStateRef State, CheckerContext &C, |
526 | llvm::ArrayRef<SymbolRef> TaintedSyms, |
527 | AllocationFamily Family) const; |
528 | |
529 | void checkTaintedness(CheckerContext &C, const CallEvent &Call, |
530 | const SVal SizeSVal, ProgramStateRef State, |
531 | AllocationFamily Family) const; |
532 | |
  // TODO: Remove mutable by moving the initialization to the registry function.
534 | mutable std::optional<uint64_t> KernelZeroFlagVal; |
535 | |
536 | using KernelZeroSizePtrValueTy = std::optional<int>; |
537 | /// Store the value of macro called `ZERO_SIZE_PTR`. |
538 | /// The value is initialized at first use, before first use the outer |
539 | /// Optional is empty, afterwards it contains another Optional that indicates |
540 | /// if the macro value could be determined, and if yes the value itself. |
541 | mutable std::optional<KernelZeroSizePtrValueTy> KernelZeroSizePtrValue; |
542 | |
543 | /// Process C++ operator new()'s allocation, which is the part of C++ |
544 | /// new-expression that goes before the constructor. |
545 | [[nodiscard]] ProgramStateRef |
546 | processNewAllocation(const CXXAllocatorCall &Call, CheckerContext &C, |
547 | AllocationFamily Family) const; |
548 | |
549 | /// Perform a zero-allocation check. |
550 | /// |
551 | /// \param [in] Call The expression that allocates memory. |
552 | /// \param [in] IndexOfSizeArg Index of the argument that specifies the size |
553 | /// of the memory that needs to be allocated. E.g. for malloc, this would be |
554 | /// 0. |
555 | /// \param [in] RetVal Specifies the newly allocated pointer value; |
556 | /// if unspecified, the value of expression \p E is used. |
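  ///
  /// A sketch of the case this check guards against:
  ///   char *p = (char *)malloc(0);
  ///   p[0] = 'x'; // use of zero-allocated memory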
557 | [[nodiscard]] static ProgramStateRef |
558 | ProcessZeroAllocCheck(CheckerContext &C, const CallEvent &Call, |
559 | const unsigned IndexOfSizeArg, ProgramStateRef State, |
560 | std::optional<SVal> RetVal = std::nullopt); |
561 | |
  /// Model functions with the ownership_returns attribute.
  ///
  /// User-defined functions may have the ownership_returns attribute, which
  /// annotates that the function returns an object that was allocated on the
  /// heap, and passes ownership to the caller.
567 | /// |
568 | /// void __attribute((ownership_returns(malloc, 1))) *my_malloc(size_t); |
569 | /// |
570 | /// It has two parameters: |
571 | /// - first: name of the resource (e.g. 'malloc') |
572 | /// - (OPTIONAL) second: size of the allocated region |
573 | /// |
574 | /// \param [in] Call The expression that allocates memory. |
575 | /// \param [in] Att The ownership_returns attribute. |
576 | /// \param [in] State The \c ProgramState right before allocation. |
577 | /// \returns The ProgramState right after allocation. |
578 | [[nodiscard]] ProgramStateRef |
579 | MallocMemReturnsAttr(CheckerContext &C, const CallEvent &Call, |
580 | const OwnershipAttr *Att, ProgramStateRef State) const; |
581 | /// Models memory allocation. |
582 | /// |
583 | /// \param [in] C Checker context. |
584 | /// \param [in] Call The expression that allocates memory. |
585 | /// \param [in] State The \c ProgramState right before allocation. |
586 | /// \param [in] isAlloca Is the allocation function alloca-like |
587 | /// \returns The ProgramState with returnValue bound |
588 | [[nodiscard]] ProgramStateRef MallocBindRetVal(CheckerContext &C, |
589 | const CallEvent &Call, |
590 | ProgramStateRef State, |
591 | bool isAlloca) const; |
592 | |
593 | /// Models memory allocation. |
594 | /// |
595 | /// \param [in] Call The expression that allocates memory. |
596 | /// \param [in] SizeEx Size of the memory that needs to be allocated. |
  /// \param [in] Init The value the allocated memory needs to be initialized
  /// with. For example, \c calloc initializes the allocated memory to 0,
  /// malloc leaves it undefined.
600 | /// \param [in] State The \c ProgramState right before allocation. |
601 | /// \returns The ProgramState right after allocation. |
602 | [[nodiscard]] ProgramStateRef |
603 | MallocMemAux(CheckerContext &C, const CallEvent &Call, const Expr *SizeEx, |
604 | SVal Init, ProgramStateRef State, AllocationFamily Family) const; |
605 | |
606 | /// Models memory allocation. |
607 | /// |
608 | /// \param [in] Call The expression that allocates memory. |
609 | /// \param [in] Size Size of the memory that needs to be allocated. |
  /// \param [in] Init The value the allocated memory needs to be initialized
  /// with. For example, \c calloc initializes the allocated memory to 0,
  /// malloc leaves it undefined.
613 | /// \param [in] State The \c ProgramState right before allocation. |
614 | /// \returns The ProgramState right after allocation. |
615 | [[nodiscard]] ProgramStateRef MallocMemAux(CheckerContext &C, |
616 | const CallEvent &Call, SVal Size, |
617 | SVal Init, ProgramStateRef State, |
618 | AllocationFamily Family) const; |
619 | |
  // Check this malloc() call for special flags. At present that means M_ZERO
  // or __GFP_ZERO (in which case, treat it like calloc).
622 | [[nodiscard]] std::optional<ProgramStateRef> |
623 | performKernelMalloc(const CallEvent &Call, CheckerContext &C, |
624 | const ProgramStateRef &State) const; |
625 | |
626 | /// Model functions with the ownership_takes and ownership_holds attributes. |
627 | /// |
  /// User-defined functions may have the ownership_takes and/or
  /// ownership_holds attributes, which annotate that the function frees the
  /// memory passed as a parameter.
631 | /// |
632 | /// void __attribute((ownership_takes(malloc, 1))) my_free(void *); |
633 | /// void __attribute((ownership_holds(malloc, 1))) my_hold(void *); |
634 | /// |
635 | /// They have two parameters: |
636 | /// - first: name of the resource (e.g. 'malloc') |
637 | /// - second: index of the parameter the attribute applies to |
638 | /// |
639 | /// \param [in] Call The expression that frees memory. |
640 | /// \param [in] Att The ownership_takes or ownership_holds attribute. |
641 | /// \param [in] State The \c ProgramState right before allocation. |
642 | /// \returns The ProgramState right after deallocation. |
643 | [[nodiscard]] ProgramStateRef FreeMemAttr(CheckerContext &C, |
644 | const CallEvent &Call, |
645 | const OwnershipAttr *Att, |
646 | ProgramStateRef State) const; |
647 | |
648 | /// Models memory deallocation. |
649 | /// |
650 | /// \param [in] Call The expression that frees memory. |
651 | /// \param [in] State The \c ProgramState right before allocation. |
652 | /// \param [in] Num Index of the argument that needs to be freed. This is |
653 | /// normally 0, but for custom free functions it may be different. |
654 | /// \param [in] Hold Whether the parameter at \p Index has the ownership_holds |
655 | /// attribute. |
656 | /// \param [out] IsKnownToBeAllocated Whether the memory to be freed is known |
657 | /// to have been allocated, or in other words, the symbol to be freed was |
658 | /// registered as allocated by this checker. In the following case, \c ptr |
659 | /// isn't known to be allocated. |
660 | /// void Haha(int *ptr) { |
661 | /// ptr = realloc(ptr, 67); |
662 | /// // ... |
663 | /// } |
664 | /// \param [in] ReturnsNullOnFailure Whether the memory deallocation function |
665 | /// we're modeling returns with Null on failure. |
666 | /// \returns The ProgramState right after deallocation. |
667 | [[nodiscard]] ProgramStateRef |
668 | FreeMemAux(CheckerContext &C, const CallEvent &Call, ProgramStateRef State, |
669 | unsigned Num, bool Hold, bool &IsKnownToBeAllocated, |
670 | AllocationFamily Family, bool ReturnsNullOnFailure = false) const; |
671 | |
672 | /// Models memory deallocation. |
673 | /// |
  /// \param [in] ArgExpr The variable whose pointee needs to be freed.
  /// \param [in] Call The expression that frees the memory.
  /// \param [in] State The \c ProgramState right before allocation.
678 | /// \param [in] Hold Whether the parameter at \p Index has the ownership_holds |
679 | /// attribute. |
680 | /// \param [out] IsKnownToBeAllocated Whether the memory to be freed is known |
681 | /// to have been allocated, or in other words, the symbol to be freed was |
682 | /// registered as allocated by this checker. In the following case, \c ptr |
683 | /// isn't known to be allocated. |
684 | /// void Haha(int *ptr) { |
685 | /// ptr = realloc(ptr, 67); |
686 | /// // ... |
687 | /// } |
688 | /// \param [in] ReturnsNullOnFailure Whether the memory deallocation function |
689 | /// we're modeling returns with Null on failure. |
690 | /// \param [in] ArgValOpt Optional value to use for the argument instead of |
691 | /// the one obtained from ArgExpr. |
692 | /// \returns The ProgramState right after deallocation. |
693 | [[nodiscard]] ProgramStateRef |
694 | FreeMemAux(CheckerContext &C, const Expr *ArgExpr, const CallEvent &Call, |
695 | ProgramStateRef State, bool Hold, bool &IsKnownToBeAllocated, |
696 | AllocationFamily Family, bool ReturnsNullOnFailure = false, |
697 | std::optional<SVal> ArgValOpt = {}) const; |
698 | |
699 | // TODO: Needs some refactoring, as all other deallocation modeling |
700 | // functions are suffering from out parameters and messy code due to how |
701 | // realloc is handled. |
702 | // |
703 | /// Models memory reallocation. |
704 | /// |
  /// \param [in] Call The expression that reallocates memory.
  /// \param [in] ShouldFreeOnFail Whether the supplied memory should be freed
  /// if reallocation fails.
708 | /// \param [in] State The \c ProgramState right before reallocation. |
709 | /// \param [in] SuffixWithN Whether the reallocation function we're modeling |
710 | /// has an '_n' suffix, such as g_realloc_n. |
711 | /// \returns The ProgramState right after reallocation. |
712 | [[nodiscard]] ProgramStateRef |
713 | ReallocMemAux(CheckerContext &C, const CallEvent &Call, bool ShouldFreeOnFail, |
714 | ProgramStateRef State, AllocationFamily Family, |
715 | bool SuffixWithN = false) const; |
716 | |
717 | /// Evaluates the buffer size that needs to be allocated. |
718 | /// |
  /// \param [in] Blocks The number of blocks that need to be allocated.
720 | /// \param [in] BlockBytes The size of a block. |
721 | /// \returns The symbolic value of \p Blocks * \p BlockBytes. |
722 | [[nodiscard]] static SVal evalMulForBufferSize(CheckerContext &C, |
723 | const Expr *Blocks, |
724 | const Expr *BlockBytes); |
725 | |
  /// Models zero-initialized array allocation.
  ///
  /// \param [in] Call The expression that allocates memory.
  /// \param [in] State The \c ProgramState right before allocation.
730 | /// \returns The ProgramState right after allocation. |
731 | [[nodiscard]] ProgramStateRef CallocMem(CheckerContext &C, |
732 | const CallEvent &Call, |
733 | ProgramStateRef State) const; |
734 | |
735 | /// See if deallocation happens in a suspicious context. If so, escape the |
736 | /// pointers that otherwise would have been deallocated and return true. |
737 | bool suppressDeallocationsInSuspiciousContexts(const CallEvent &Call, |
738 | CheckerContext &C) const; |
739 | |
740 | /// If in \p S \p Sym is used, check whether \p Sym was already freed. |
741 | bool checkUseAfterFree(SymbolRef Sym, CheckerContext &C, const Stmt *S) const; |
742 | |
743 | /// If in \p S \p Sym is used, check whether \p Sym was allocated as a zero |
744 | /// sized memory region. |
745 | void checkUseZeroAllocated(SymbolRef Sym, CheckerContext &C, |
746 | const Stmt *S) const; |
747 | |
748 | /// If in \p S \p Sym is being freed, check whether \p Sym was already freed. |
749 | bool checkDoubleDelete(SymbolRef Sym, CheckerContext &C) const; |
750 | |
751 | /// Check if the function is known to free memory, or if it is |
752 | /// "interesting" and should be modeled explicitly. |
753 | /// |
754 | /// \param [out] EscapingSymbol A function might not free memory in general, |
755 | /// but could be known to free a particular symbol. In this case, false is |
756 | /// returned and the single escaping symbol is returned through the out |
757 | /// parameter. |
758 | /// |
759 | /// We assume that pointers do not escape through calls to system functions |
760 | /// not handled by this checker. |
761 | bool mayFreeAnyEscapedMemoryOrIsModeledExplicitly(const CallEvent *Call, |
762 | ProgramStateRef State, |
763 | SymbolRef &EscapingSymbol) const; |
764 | |
765 | /// Implementation of the checkPointerEscape callbacks. |
766 | [[nodiscard]] ProgramStateRef |
767 | checkPointerEscapeAux(ProgramStateRef State, |
768 | const InvalidatedSymbols &Escaped, |
769 | const CallEvent *Call, PointerEscapeKind Kind, |
770 | bool IsConstPointerEscape) const; |
771 | |
772 | // Implementation of the checkPreStmt and checkEndFunction callbacks. |
773 | void checkEscapeOnReturn(const ReturnStmt *S, CheckerContext &C) const; |
774 | |
775 | ///@{ |
776 | /// Tells if a given family/call/symbol is tracked by the current checker. |
777 | /// Sets CheckKind to the kind of the checker responsible for this |
778 | /// family/call/symbol. |
779 | std::optional<CheckKind> getCheckIfTracked(AllocationFamily Family, |
780 | bool IsALeakCheck = false) const; |
781 | |
782 | std::optional<CheckKind> getCheckIfTracked(CheckerContext &C, SymbolRef Sym, |
783 | bool IsALeakCheck = false) const; |
784 | ///@} |
785 | static bool SummarizeValue(raw_ostream &os, SVal V); |
786 | static bool SummarizeRegion(ProgramStateRef State, raw_ostream &os, |
787 | const MemRegion *MR); |
788 | |
789 | void HandleNonHeapDealloc(CheckerContext &C, SVal ArgVal, SourceRange Range, |
790 | const Expr *DeallocExpr, |
791 | AllocationFamily Family) const; |
792 | |
793 | void HandleFreeAlloca(CheckerContext &C, SVal ArgVal, |
794 | SourceRange Range) const; |
795 | |
796 | void HandleMismatchedDealloc(CheckerContext &C, SourceRange Range, |
797 | const Expr *DeallocExpr, const RefState *RS, |
798 | SymbolRef Sym, bool OwnershipTransferred) const; |
799 | |
800 | void HandleOffsetFree(CheckerContext &C, SVal ArgVal, SourceRange Range, |
801 | const Expr *DeallocExpr, AllocationFamily Family, |
802 | const Expr *AllocExpr = nullptr) const; |
803 | |
804 | void HandleUseAfterFree(CheckerContext &C, SourceRange Range, |
805 | SymbolRef Sym) const; |
806 | |
807 | void HandleDoubleFree(CheckerContext &C, SourceRange Range, bool Released, |
808 | SymbolRef Sym, SymbolRef PrevSym) const; |
809 | |
810 | void HandleDoubleDelete(CheckerContext &C, SymbolRef Sym) const; |
811 | |
812 | void HandleUseZeroAlloc(CheckerContext &C, SourceRange Range, |
813 | SymbolRef Sym) const; |
814 | |
815 | void HandleFunctionPtrFree(CheckerContext &C, SVal ArgVal, SourceRange Range, |
816 | const Expr *FreeExpr, |
817 | AllocationFamily Family) const; |
818 | |
819 | /// Find the location of the allocation for Sym on the path leading to the |
820 | /// exploded node N. |
821 | static LeakInfo getAllocationSite(const ExplodedNode *N, SymbolRef Sym, |
822 | CheckerContext &C); |
823 | |
824 | void HandleLeak(SymbolRef Sym, ExplodedNode *N, CheckerContext &C) const; |
825 | |
  /// Test if the value in ArgVal equals the value of macro `ZERO_SIZE_PTR`.
827 | bool isArgZERO_SIZE_PTR(ProgramStateRef State, CheckerContext &C, |
828 | SVal ArgVal) const; |
829 | }; |
830 | } // end anonymous namespace |
831 | |
832 | //===----------------------------------------------------------------------===// |
833 | // Definition of NoOwnershipChangeVisitor. |
834 | //===----------------------------------------------------------------------===// |
835 | |
836 | namespace { |
837 | class NoMemOwnershipChangeVisitor final : public NoOwnershipChangeVisitor { |
838 | protected: |
839 | /// Syntactically checks whether the callee is a deallocating function. Since |
840 | /// we have no path-sensitive information on this call (we would need a |
  /// CallEvent instead of a CallExpr for that), it's possible that a
  /// deallocation function was called indirectly through a function pointer,
  /// but we are not able to tell, so this is a best-effort analysis.
844 | /// See namespace `memory_passed_to_fn_call_free_through_fn_ptr` in |
845 | /// clang/test/Analysis/NewDeleteLeaks.cpp. |
846 | bool isFreeingCallAsWritten(const CallExpr &Call) const { |
847 | const auto *MallocChk = static_cast<const MallocChecker *>(&Checker); |
848 | if (MallocChk->FreeingMemFnMap.lookupAsWritten(Call) || |
849 | MallocChk->ReallocatingMemFnMap.lookupAsWritten(Call)) |
850 | return true; |
851 | |
    if (const auto *Func =
            llvm::dyn_cast_or_null<FunctionDecl>(Call.getCalleeDecl()))
      return MallocChecker::isFreeingOwnershipAttrCall(Func);
855 | |
856 | return false; |
857 | } |
858 | |
  bool hasResourceStateChanged(ProgramStateRef CallEnterState,
                               ProgramStateRef CallExitEndState) final {
    return CallEnterState->get<RegionState>(Sym) !=
           CallExitEndState->get<RegionState>(Sym);
  }
864 | |
865 | /// Heuristically guess whether the callee intended to free memory. This is |
866 | /// done syntactically, because we are trying to argue about alternative |
867 | /// paths of execution, and as a consequence we don't have path-sensitive |
868 | /// information. |
869 | bool doesFnIntendToHandleOwnership(const Decl *Callee, |
870 | ASTContext &ACtx) final { |
    const FunctionDecl *FD = dyn_cast<FunctionDecl>(Callee);
872 | |
873 | // Given that the stack frame was entered, the body should always be |
874 | // theoretically obtainable. In case of body farms, the synthesized body |
875 | // is not attached to declaration, thus triggering the '!FD->hasBody()' |
876 | // branch. That said, would a synthesized body ever intend to handle |
877 | // ownership? As of today they don't. And if they did, how would we |
878 | // put notes inside it, given that it doesn't match any source locations? |
879 | if (!FD || !FD->hasBody()) |
880 | return false; |
881 | using namespace clang::ast_matchers; |
882 | |
    auto Matches = match(findAll(stmt(anyOf(cxxDeleteExpr().bind("delete"),
                                            callExpr().bind("call")))),
                         *FD->getBody(), ACtx);
    for (BoundNodes Match : Matches) {
      if (Match.getNodeAs<CXXDeleteExpr>("delete"))
        return true;

      if (const auto *Call = Match.getNodeAs<CallExpr>("call"))
        if (isFreeingCallAsWritten(*Call))
          return true;
    }
894 | // TODO: Ownership might change with an attempt to store the allocated |
895 | // memory, not only through deallocation. Check for attempted stores as |
896 | // well. |
897 | return false; |
898 | } |
899 | |
  PathDiagnosticPieceRef emitNote(const ExplodedNode *N) final {
    PathDiagnosticLocation L = PathDiagnosticLocation::create(
        N->getLocation(),
        N->getState()->getStateManager().getContext().getSourceManager());
    return std::make_shared<PathDiagnosticEventPiece>(
        L, "Returning without deallocating memory or storing the pointer for "
           "later deallocation");
  }
908 | |
909 | public: |
910 | NoMemOwnershipChangeVisitor(SymbolRef Sym, const MallocChecker *Checker) |
911 | : NoOwnershipChangeVisitor(Sym, Checker) {} |
912 | |
  void Profile(llvm::FoldingSetNodeID &ID) const override {
    static int Tag = 0;
    ID.AddPointer(&Tag);
    ID.AddPointer(Sym);
  }
918 | }; |
919 | |
920 | } // end anonymous namespace |
921 | |
922 | //===----------------------------------------------------------------------===// |
923 | // Definition of MallocBugVisitor. |
924 | //===----------------------------------------------------------------------===// |
925 | |
926 | namespace { |
927 | /// The bug visitor which allows us to print extra diagnostics along the |
928 | /// BugReport path. For example, showing the allocation site of the leaked |
929 | /// region. |
930 | class MallocBugVisitor final : public BugReporterVisitor { |
931 | protected: |
932 | enum NotificationMode { Normal, ReallocationFailed }; |
933 | |
934 | // The allocated region symbol tracked by the main analysis. |
935 | SymbolRef Sym; |
936 | |
937 | // The mode we are in, i.e. what kind of diagnostics will be emitted. |
938 | NotificationMode Mode; |
939 | |
940 | // A symbol from when the primary region should have been reallocated. |
941 | SymbolRef FailedReallocSymbol; |
942 | |
943 | // A release function stack frame in which memory was released. Used for |
944 | // miscellaneous false positive suppression. |
945 | const StackFrameContext *ReleaseFunctionLC; |
946 | |
947 | bool IsLeak; |
948 | |
949 | public: |
950 | MallocBugVisitor(SymbolRef S, bool isLeak = false) |
951 | : Sym(S), Mode(Normal), FailedReallocSymbol(nullptr), |
952 | ReleaseFunctionLC(nullptr), IsLeak(isLeak) {} |
953 | |
954 | static void *getTag() { |
955 | static int Tag = 0; |
956 | return &Tag; |
957 | } |
958 | |
  void Profile(llvm::FoldingSetNodeID &ID) const override {
    ID.AddPointer(getTag());
    ID.AddPointer(Sym);
  }
963 | |
964 | /// Did not track -> allocated. Other state (released) -> allocated. |
965 | static inline bool isAllocated(const RefState *RSCurr, const RefState *RSPrev, |
966 | const Stmt *Stmt) { |
    return (isa_and_nonnull<CallExpr, CXXNewExpr>(Stmt) &&
968 | (RSCurr && |
969 | (RSCurr->isAllocated() || RSCurr->isAllocatedOfSizeZero())) && |
970 | (!RSPrev || |
971 | !(RSPrev->isAllocated() || RSPrev->isAllocatedOfSizeZero()))); |
972 | } |
973 | |
974 | /// Did not track -> released. Other state (allocated) -> released. |
975 | /// The statement associated with the release might be missing. |
976 | static inline bool isReleased(const RefState *RSCurr, const RefState *RSPrev, |
977 | const Stmt *Stmt) { |
978 | bool IsReleased = |
979 | (RSCurr && RSCurr->isReleased()) && (!RSPrev || !RSPrev->isReleased()); |
980 | assert(!IsReleased || (isa_and_nonnull<CallExpr, CXXDeleteExpr>(Stmt)) || |
981 | (!Stmt && RSCurr->getAllocationFamily().Kind == AF_InnerBuffer)); |
982 | return IsReleased; |
983 | } |
984 | |
985 | /// Did not track -> relinquished. Other state (allocated) -> relinquished. |
986 | static inline bool isRelinquished(const RefState *RSCurr, |
987 | const RefState *RSPrev, const Stmt *Stmt) { |
988 | return ( |
        isa_and_nonnull<CallExpr, ObjCMessageExpr, ObjCPropertyRefExpr>(Stmt) &&
990 | (RSCurr && RSCurr->isRelinquished()) && |
991 | (!RSPrev || !RSPrev->isRelinquished())); |
992 | } |
993 | |
994 | /// If the expression is not a call, and the state change is |
995 | /// released -> allocated, it must be the realloc return value |
996 | /// check. If we have to handle more cases here, it might be cleaner just |
997 | /// to track this extra bit in the state itself. |
998 | static inline bool hasReallocFailed(const RefState *RSCurr, |
999 | const RefState *RSPrev, |
1000 | const Stmt *Stmt) { |
    return ((!isa_and_nonnull<CallExpr>(Stmt)) &&
1002 | (RSCurr && |
1003 | (RSCurr->isAllocated() || RSCurr->isAllocatedOfSizeZero())) && |
1004 | (RSPrev && |
1005 | !(RSPrev->isAllocated() || RSPrev->isAllocatedOfSizeZero()))); |
1006 | } |
1007 | |
1008 | PathDiagnosticPieceRef VisitNode(const ExplodedNode *N, |
1009 | BugReporterContext &BRC, |
1010 | PathSensitiveBugReport &BR) override; |
1011 | |
1012 | PathDiagnosticPieceRef getEndPath(BugReporterContext &BRC, |
1013 | const ExplodedNode *EndPathNode, |
1014 | PathSensitiveBugReport &BR) override { |
1015 | if (!IsLeak) |
1016 | return nullptr; |
1017 | |
1018 | PathDiagnosticLocation L = BR.getLocation(); |
1019 | // Do not add the statement itself as a range in case of leak. |
    return std::make_shared<PathDiagnosticEventPiece>(L, BR.getDescription(),
                                                      false);
1022 | } |
1023 | |
1024 | private: |
1025 | class StackHintGeneratorForReallocationFailed |
1026 | : public StackHintGeneratorForSymbol { |
1027 | public: |
1028 | StackHintGeneratorForReallocationFailed(SymbolRef S, StringRef M) |
1029 | : StackHintGeneratorForSymbol(S, M) {} |
1030 | |
1031 | std::string getMessageForArg(const Expr *ArgE, unsigned ArgIndex) override { |
1032 | // Printed parameters start at 1, not 0. |
1033 | ++ArgIndex; |
1034 | |
1035 | SmallString<200> buf; |
1036 | llvm::raw_svector_ostream os(buf); |
1037 | |
1038 | os << "Reallocation of " << ArgIndex << llvm::getOrdinalSuffix(Val: ArgIndex) |
1039 | << " parameter failed" ; |
1040 | |
1041 | return std::string(os.str()); |
1042 | } |
1043 | |
1044 | std::string getMessageForReturn(const CallExpr *CallExpr) override { |
1045 | return "Reallocation of returned value failed" ; |
1046 | } |
1047 | }; |
1048 | }; |
1049 | } // end anonymous namespace |
1050 | |
1051 | // A map from the freed symbol to the symbol representing the return value of |
1052 | // the free function. |
1053 | REGISTER_MAP_WITH_PROGRAMSTATE(FreeReturnValue, SymbolRef, SymbolRef) |
1054 | |
1055 | namespace { |
1056 | class StopTrackingCallback final : public SymbolVisitor { |
1057 | ProgramStateRef state; |
1058 | |
1059 | public: |
1060 | StopTrackingCallback(ProgramStateRef st) : state(std::move(st)) {} |
1061 | ProgramStateRef getState() const { return state; } |
1062 | |
  bool VisitSymbol(SymbolRef sym) override {
    state = state->remove<RegionState>(sym);
    return true;
  }
1067 | }; |
1068 | } // end anonymous namespace |
1069 | |
1070 | static bool isStandardNew(const FunctionDecl *FD) { |
1071 | if (!FD) |
1072 | return false; |
1073 | |
1074 | OverloadedOperatorKind Kind = FD->getOverloadedOperator(); |
1075 | if (Kind != OO_New && Kind != OO_Array_New) |
1076 | return false; |
1077 | |
1078 | // This is standard if and only if it's not defined in a user file. |
1079 | SourceLocation L = FD->getLocation(); |
1080 | // If the header for operator delete is not included, it's still defined |
1081 | // in an invalid source location. Check to make sure we don't crash. |
  return !L.isValid() ||
         FD->getASTContext().getSourceManager().isInSystemHeader(L);
1084 | } |
1085 | |
1086 | static bool isStandardDelete(const FunctionDecl *FD) { |
1087 | if (!FD) |
1088 | return false; |
1089 | |
1090 | OverloadedOperatorKind Kind = FD->getOverloadedOperator(); |
1091 | if (Kind != OO_Delete && Kind != OO_Array_Delete) |
1092 | return false; |
1093 | |
1094 | bool HasBody = FD->hasBody(); // Prefer using the definition. |
1095 | |
1096 | // This is standard if and only if it's not defined in a user file. |
1097 | SourceLocation L = FD->getLocation(); |
1098 | |
1099 | // If the header for operator delete is not included, it's still defined |
1100 | // in an invalid source location. Check to make sure we don't crash. |
  const auto &SM = FD->getASTContext().getSourceManager();
  return L.isInvalid() || (!HasBody && SM.isInSystemHeader(L));
1103 | } |
1104 | |
1105 | //===----------------------------------------------------------------------===// |
1106 | // Methods of MallocChecker and MallocBugVisitor. |
1107 | //===----------------------------------------------------------------------===// |
1108 | |
1109 | bool MallocChecker::isFreeingOwnershipAttrCall(const CallEvent &Call) { |
  const auto *Func = dyn_cast_or_null<FunctionDecl>(Call.getDecl());
1111 | |
1112 | return Func && isFreeingOwnershipAttrCall(Func); |
1113 | } |
1114 | |
1115 | bool MallocChecker::isFreeingOwnershipAttrCall(const FunctionDecl *Func) { |
1116 | if (Func->hasAttrs()) { |
1117 | for (const auto *I : Func->specific_attrs<OwnershipAttr>()) { |
1118 | OwnershipAttr::OwnershipKind OwnKind = I->getOwnKind(); |
1119 | if (OwnKind == OwnershipAttr::Takes || OwnKind == OwnershipAttr::Holds) |
1120 | return true; |
1121 | } |
1122 | } |
1123 | return false; |
1124 | } |
1125 | |
1126 | bool MallocChecker::isFreeingCall(const CallEvent &Call) const { |
1127 | if (FreeingMemFnMap.lookup(Call) || ReallocatingMemFnMap.lookup(Call)) |
1128 | return true; |
1129 | |
1130 | return isFreeingOwnershipAttrCall(Call); |
1131 | } |
1132 | |
1133 | bool MallocChecker::isAllocatingOwnershipAttrCall(const CallEvent &Call) { |
  const auto *Func = dyn_cast_or_null<FunctionDecl>(Call.getDecl());
1135 | |
1136 | return Func && isAllocatingOwnershipAttrCall(Func); |
1137 | } |
1138 | |
1139 | bool MallocChecker::isAllocatingOwnershipAttrCall(const FunctionDecl *Func) { |
1140 | for (const auto *I : Func->specific_attrs<OwnershipAttr>()) { |
1141 | if (I->getOwnKind() == OwnershipAttr::Returns) |
1142 | return true; |
1143 | } |
1144 | |
1145 | return false; |
1146 | } |
1147 | |
1148 | bool MallocChecker::isMemCall(const CallEvent &Call) const { |
1149 | if (FreeingMemFnMap.lookup(Call) || AllocatingMemFnMap.lookup(Call) || |
1150 | AllocaMemFnMap.lookup(Call) || ReallocatingMemFnMap.lookup(Call)) |
1151 | return true; |
1152 | |
1153 | if (!ShouldIncludeOwnershipAnnotatedFunctions) |
1154 | return false; |
1155 | |
  const auto *Func = dyn_cast<FunctionDecl>(Call.getDecl());
1157 | return Func && Func->hasAttr<OwnershipAttr>(); |
1158 | } |
1159 | |
1160 | std::optional<ProgramStateRef> |
1161 | MallocChecker::performKernelMalloc(const CallEvent &Call, CheckerContext &C, |
1162 | const ProgramStateRef &State) const { |
1163 | // 3-argument malloc(), as commonly used in {Free,Net,Open}BSD Kernels: |
1164 | // |
1165 | // void *malloc(unsigned long size, struct malloc_type *mtp, int flags); |
1166 | // |
1167 | // One of the possible flags is M_ZERO, which means 'give me back an |
1168 | // allocation which is already zeroed', like calloc. |
1169 | |
1170 | // 2-argument kmalloc(), as used in the Linux kernel: |
1171 | // |
1172 | // void *kmalloc(size_t size, gfp_t flags); |
1173 | // |
1174 | // Has the similar flag value __GFP_ZERO. |
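
  // For illustration (a sketch; the exact flag macros and any extra arguments
  // vary by platform and are not modeled beyond the zero flag):
  //
  //   buf = malloc(len, M_TEMP, M_WAITOK | M_ZERO);         // BSD kernels
  //   obj = kmalloc(sizeof(*obj), GFP_KERNEL | __GFP_ZERO); // Linux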
1175 | |
1176 | // This logic is largely cloned from O_CREAT in UnixAPIChecker, maybe some |
1177 | // code could be shared. |
1178 | |
1179 | ASTContext &Ctx = C.getASTContext(); |
1180 | llvm::Triple::OSType OS = Ctx.getTargetInfo().getTriple().getOS(); |
1181 | |
1182 | if (!KernelZeroFlagVal) { |
1183 | switch (OS) { |
1184 | case llvm::Triple::FreeBSD: |
1185 | KernelZeroFlagVal = 0x0100; |
1186 | break; |
1187 | case llvm::Triple::NetBSD: |
1188 | KernelZeroFlagVal = 0x0002; |
1189 | break; |
1190 | case llvm::Triple::OpenBSD: |
1191 | KernelZeroFlagVal = 0x0008; |
1192 | break; |
1193 | case llvm::Triple::Linux: |
1194 | // __GFP_ZERO |
1195 | KernelZeroFlagVal = 0x8000; |
1196 | break; |
1197 | default: |
1198 | // FIXME: We need a more general way of getting the M_ZERO value. |
1199 | // See also: O_CREAT in UnixAPIChecker.cpp. |
1200 | |
1201 | // Fall back to normal malloc behavior on platforms where we don't |
1202 | // know M_ZERO. |
1203 | return std::nullopt; |
1204 | } |
1205 | } |
1206 | |
  // We treat the last argument as the flags argument, and callers fall back to
  // normal malloc on a std::nullopt return. This works for the FreeBSD kernel
  // malloc as well as Linux kmalloc.
1210 | if (Call.getNumArgs() < 2) |
1211 | return std::nullopt; |
1212 | |
  const Expr *FlagsEx = Call.getArgExpr(Call.getNumArgs() - 1);
  const SVal V = C.getSVal(FlagsEx);
  if (!isa<NonLoc>(V)) {
1216 | // The case where 'V' can be a location can only be due to a bad header, |
1217 | // so in this case bail out. |
1218 | return std::nullopt; |
1219 | } |
1220 | |
  NonLoc Flags = V.castAs<NonLoc>();
  NonLoc ZeroFlag = C.getSValBuilder()
                        .makeIntVal(*KernelZeroFlagVal, FlagsEx->getType())
                        .castAs<NonLoc>();
  SVal MaskedFlagsUC = C.getSValBuilder().evalBinOpNN(State, BO_And,
                                                      Flags, ZeroFlag,
                                                      FlagsEx->getType());
1228 | if (MaskedFlagsUC.isUnknownOrUndef()) |
1229 | return std::nullopt; |
1230 | DefinedSVal MaskedFlags = MaskedFlagsUC.castAs<DefinedSVal>(); |
1231 | |
1232 | // Check if maskedFlags is non-zero. |
1233 | ProgramStateRef TrueState, FalseState; |
  std::tie(TrueState, FalseState) = State->assume(MaskedFlags);
1235 | |
1236 | // If M_ZERO is set, treat this like calloc (initialized). |
  if (TrueState && !FalseState) {
    SVal ZeroVal = C.getSValBuilder().makeZeroVal(Ctx.CharTy);
    return MallocMemAux(C, Call, Call.getArgExpr(0), ZeroVal, TrueState,
                        AllocationFamily(AF_Malloc));
  }
1242 | |
1243 | return std::nullopt; |
1244 | } |
1245 | |
1246 | SVal MallocChecker::evalMulForBufferSize(CheckerContext &C, const Expr *Blocks, |
1247 | const Expr *BlockBytes) { |
  SValBuilder &SB = C.getSValBuilder();
  SVal BlocksVal = C.getSVal(Blocks);
  SVal BlockBytesVal = C.getSVal(BlockBytes);
  ProgramStateRef State = C.getState();
  SVal TotalSize = SB.evalBinOp(State, BO_Mul, BlocksVal, BlockBytesVal,
                                SB.getContext().getSizeType());
1254 | return TotalSize; |
1255 | } |
1256 | |
1257 | void MallocChecker::checkBasicAlloc(ProgramStateRef State, |
1258 | const CallEvent &Call, |
1259 | CheckerContext &C) const { |
1260 | State = MallocMemAux(C, Call, SizeEx: Call.getArgExpr(Index: 0), Init: UndefinedVal(), State, |
1261 | Family: AllocationFamily(AF_Malloc)); |
1262 | State = ProcessZeroAllocCheck(C, Call, IndexOfSizeArg: 0, State); |
1263 | C.addTransition(State); |
1264 | } |
1265 | |
1266 | void MallocChecker::checkKernelMalloc(ProgramStateRef State, |
1267 | const CallEvent &Call, |
1268 | CheckerContext &C) const { |
1269 | std::optional<ProgramStateRef> MaybeState = |
1270 | performKernelMalloc(Call, C, State); |
1271 | if (MaybeState) |
1272 | State = *MaybeState; |
1273 | else |
1274 | State = MallocMemAux(C, Call, SizeEx: Call.getArgExpr(Index: 0), Init: UndefinedVal(), State, |
1275 | Family: AllocationFamily(AF_Malloc)); |
1276 | C.addTransition(State); |
1277 | } |
1278 | |
1279 | static bool isStandardRealloc(const CallEvent &Call) { |
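// This matches the standard prototype
//   void *realloc(void *ptr, size_t size);
// by comparing the desugared return and parameter types below.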
1280 | const FunctionDecl *FD = dyn_cast<FunctionDecl>(Val: Call.getDecl()); |
1281 | assert(FD); |
1282 | ASTContext &AC = FD->getASTContext(); |
1283 | |
1284 | return FD->getDeclaredReturnType().getDesugaredType(Context: AC) == AC.VoidPtrTy && |
1285 | FD->getParamDecl(i: 0)->getType().getDesugaredType(Context: AC) == AC.VoidPtrTy && |
1286 | FD->getParamDecl(i: 1)->getType().getDesugaredType(Context: AC) == |
1287 | AC.getSizeType(); |
1288 | } |
1289 | |
1290 | static bool isGRealloc(const CallEvent &Call) { |
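// The GLib counterpart is declared as
//   gpointer g_realloc(gpointer mem, gsize n_bytes);
// where gpointer is 'void *'; the size parameter is matched against
// 'unsigned long' here.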
1291 | const FunctionDecl *FD = dyn_cast<FunctionDecl>(Val: Call.getDecl()); |
1292 | assert(FD); |
1293 | ASTContext &AC = FD->getASTContext(); |
1294 | |
1295 | return FD->getDeclaredReturnType().getDesugaredType(Context: AC) == AC.VoidPtrTy && |
1296 | FD->getParamDecl(i: 0)->getType().getDesugaredType(Context: AC) == AC.VoidPtrTy && |
1297 | FD->getParamDecl(i: 1)->getType().getDesugaredType(Context: AC) == |
1298 | AC.UnsignedLongTy; |
1299 | } |
1300 | |
1301 | void MallocChecker::checkRealloc(ProgramStateRef State, const CallEvent &Call, |
1302 | CheckerContext &C, |
1303 | bool ShouldFreeOnFail) const { |
1304 | // Ignore calls to functions whose type does not match the expected type of |
1305 | // either the standard realloc or g_realloc from GLib. |
1306 | // FIXME: Should we perform this kind of checking consistently for each |
1307 | // function? If yes, then perhaps extend the `CallDescription` interface to |
1308 | // handle this. |
1309 | if (!isStandardRealloc(Call) && !isGRealloc(Call)) |
1310 | return; |
1311 | |
1312 | State = ReallocMemAux(C, Call, ShouldFreeOnFail, State, |
1313 | Family: AllocationFamily(AF_Malloc)); |
1314 | State = ProcessZeroAllocCheck(C, Call, IndexOfSizeArg: 1, State); |
1315 | C.addTransition(State); |
1316 | } |
1317 | |
1318 | void MallocChecker::checkCalloc(ProgramStateRef State, const CallEvent &Call, |
1319 | CheckerContext &C) const { |
1320 | State = CallocMem(C, Call, State); |
1321 | State = ProcessZeroAllocCheck(C, Call, IndexOfSizeArg: 0, State); |
1322 | State = ProcessZeroAllocCheck(C, Call, IndexOfSizeArg: 1, State); |
1323 | C.addTransition(State); |
1324 | } |
1325 | |
1326 | void MallocChecker::checkFree(ProgramStateRef State, const CallEvent &Call, |
1327 | CheckerContext &C) const { |
1328 | bool IsKnownToBeAllocatedMemory = false; |
1329 | if (suppressDeallocationsInSuspiciousContexts(Call, C)) |
1330 | return; |
1331 | State = FreeMemAux(C, Call, State, Num: 0, Hold: false, IsKnownToBeAllocated&: IsKnownToBeAllocatedMemory, |
1332 | Family: AllocationFamily(AF_Malloc)); |
1333 | C.addTransition(State); |
1334 | } |
1335 | |
1336 | void MallocChecker::checkAlloca(ProgramStateRef State, const CallEvent &Call, |
1337 | CheckerContext &C) const { |
1338 | State = MallocMemAux(C, Call, SizeEx: Call.getArgExpr(Index: 0), Init: UndefinedVal(), State, |
1339 | Family: AllocationFamily(AF_Alloca)); |
1340 | State = ProcessZeroAllocCheck(C, Call, IndexOfSizeArg: 0, State); |
1341 | C.addTransition(State); |
1342 | } |
1343 | |
1344 | void MallocChecker::checkStrdup(ProgramStateRef State, const CallEvent &Call, |
1345 | CheckerContext &C) const { |
1346 | const auto *CE = dyn_cast_or_null<CallExpr>(Val: Call.getOriginExpr()); |
1347 | if (!CE) |
1348 | return; |
1349 | State = MallocMemAux(C, Call, Size: UnknownVal(), Init: UnknownVal(), State, |
1350 | Family: AllocationFamily(AF_Malloc)); |
1351 | |
1352 | C.addTransition(State); |
1353 | } |
1354 | |
1355 | void MallocChecker::checkIfNameIndex(ProgramStateRef State, |
1356 | const CallEvent &Call, |
1357 | CheckerContext &C) const { |
// Should we model this more precisely? if_nameindex() returns an array of
// structs terminated by a zero-filled entry, so we could allocate a fixed
// number of elements with zeros in the last one.
1360 | State = MallocMemAux(C, Call, Size: UnknownVal(), Init: UnknownVal(), State, |
1361 | Family: AllocationFamily(AF_IfNameIndex)); |
1362 | |
1363 | C.addTransition(State); |
1364 | } |
1365 | |
1366 | void MallocChecker::checkIfFreeNameIndex(ProgramStateRef State, |
1367 | const CallEvent &Call, |
1368 | CheckerContext &C) const { |
1369 | bool IsKnownToBeAllocatedMemory = false; |
1370 | State = FreeMemAux(C, Call, State, Num: 0, Hold: false, IsKnownToBeAllocated&: IsKnownToBeAllocatedMemory, |
1371 | Family: AllocationFamily(AF_IfNameIndex)); |
1372 | C.addTransition(State); |
1373 | } |
1374 | |
1375 | static const Expr *getPlacementNewBufferArg(const CallExpr *CE, |
1376 | const FunctionDecl *FD) { |
1377 | // Checking for signature: |
1378 | // void* operator new ( std::size_t count, void* ptr ); |
1379 | // void* operator new[]( std::size_t count, void* ptr ); |
1380 | if (CE->getNumArgs() != 2 || (FD->getOverloadedOperator() != OO_New && |
1381 | FD->getOverloadedOperator() != OO_Array_New)) |
1382 | return nullptr; |
1383 | auto BuffType = FD->getParamDecl(i: 1)->getType(); |
1384 | if (BuffType.isNull() || !BuffType->isVoidPointerType()) |
1385 | return nullptr; |
1386 | return CE->getArg(Arg: 1); |
1387 | } |
1388 | |
1389 | void MallocChecker::checkCXXNewOrCXXDelete(ProgramStateRef State, |
1390 | const CallEvent &Call, |
1391 | CheckerContext &C) const { |
1392 | bool IsKnownToBeAllocatedMemory = false; |
1393 | const auto *CE = dyn_cast_or_null<CallExpr>(Val: Call.getOriginExpr()); |
1394 | if (!CE) |
1395 | return; |
1396 | |
1397 | assert(isStandardNewDelete(Call)); |
1398 | |
1399 | // Process direct calls to operator new/new[]/delete/delete[] functions |
1400 | // as distinct from new/new[]/delete/delete[] expressions that are |
1401 | // processed by the checkPostStmt callbacks for CXXNewExpr and |
1402 | // CXXDeleteExpr. |
1403 | const FunctionDecl *FD = C.getCalleeDecl(CE); |
1404 | if (const auto *BufArg = getPlacementNewBufferArg(CE, FD)) { |
1405 | // Placement new does not allocate memory |
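// For example, for 'T *t = new (buf) T;' the result of the call is simply
// 'buf', so bind the buffer argument as the value of the call expression.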
1406 | auto RetVal = State->getSVal(Ex: BufArg, LCtx: Call.getLocationContext()); |
1407 | State = State->BindExpr(S: CE, LCtx: C.getLocationContext(), V: RetVal); |
1408 | C.addTransition(State); |
1409 | return; |
1410 | } |
1411 | |
1412 | switch (FD->getOverloadedOperator()) { |
1413 | case OO_New: |
1414 | State = MallocMemAux(C, Call, SizeEx: CE->getArg(Arg: 0), Init: UndefinedVal(), State, |
1415 | Family: AllocationFamily(AF_CXXNew)); |
1416 | State = ProcessZeroAllocCheck(C, Call, IndexOfSizeArg: 0, State); |
1417 | break; |
1418 | case OO_Array_New: |
1419 | State = MallocMemAux(C, Call, SizeEx: CE->getArg(Arg: 0), Init: UndefinedVal(), State, |
1420 | Family: AllocationFamily(AF_CXXNewArray)); |
1421 | State = ProcessZeroAllocCheck(C, Call, IndexOfSizeArg: 0, State); |
1422 | break; |
1423 | case OO_Delete: |
1424 | State = FreeMemAux(C, Call, State, Num: 0, Hold: false, IsKnownToBeAllocated&: IsKnownToBeAllocatedMemory, |
1425 | Family: AllocationFamily(AF_CXXNew)); |
1426 | break; |
1427 | case OO_Array_Delete: |
1428 | State = FreeMemAux(C, Call, State, Num: 0, Hold: false, IsKnownToBeAllocated&: IsKnownToBeAllocatedMemory, |
1429 | Family: AllocationFamily(AF_CXXNewArray)); |
1430 | break; |
1431 | default: |
1432 | assert(false && "not a new/delete operator" ); |
1433 | return; |
1434 | } |
1435 | |
1436 | C.addTransition(State); |
1437 | } |
1438 | |
1439 | void MallocChecker::checkGMalloc0(ProgramStateRef State, const CallEvent &Call, |
1440 | CheckerContext &C) const { |
1441 | SValBuilder &svalBuilder = C.getSValBuilder(); |
1442 | SVal zeroVal = svalBuilder.makeZeroVal(type: svalBuilder.getContext().CharTy); |
1443 | State = MallocMemAux(C, Call, SizeEx: Call.getArgExpr(Index: 0), Init: zeroVal, State, |
1444 | Family: AllocationFamily(AF_Malloc)); |
1445 | State = ProcessZeroAllocCheck(C, Call, IndexOfSizeArg: 0, State); |
1446 | C.addTransition(State); |
1447 | } |
1448 | |
1449 | void MallocChecker::checkGMemdup(ProgramStateRef State, const CallEvent &Call, |
1450 | CheckerContext &C) const { |
1451 | State = MallocMemAux(C, Call, SizeEx: Call.getArgExpr(Index: 1), Init: UnknownVal(), State, |
1452 | Family: AllocationFamily(AF_Malloc)); |
1453 | State = ProcessZeroAllocCheck(C, Call, IndexOfSizeArg: 1, State); |
1454 | C.addTransition(State); |
1455 | } |
1456 | |
1457 | void MallocChecker::checkGMallocN(ProgramStateRef State, const CallEvent &Call, |
1458 | CheckerContext &C) const { |
1459 | SVal Init = UndefinedVal(); |
1460 | SVal TotalSize = evalMulForBufferSize(C, Blocks: Call.getArgExpr(Index: 0), BlockBytes: Call.getArgExpr(Index: 1)); |
1461 | State = MallocMemAux(C, Call, Size: TotalSize, Init, State, |
1462 | Family: AllocationFamily(AF_Malloc)); |
1463 | State = ProcessZeroAllocCheck(C, Call, IndexOfSizeArg: 0, State); |
1464 | State = ProcessZeroAllocCheck(C, Call, IndexOfSizeArg: 1, State); |
1465 | C.addTransition(State); |
1466 | } |
1467 | |
1468 | void MallocChecker::checkGMallocN0(ProgramStateRef State, const CallEvent &Call, |
1469 | CheckerContext &C) const { |
1470 | SValBuilder &SB = C.getSValBuilder(); |
1471 | SVal Init = SB.makeZeroVal(type: SB.getContext().CharTy); |
1472 | SVal TotalSize = evalMulForBufferSize(C, Blocks: Call.getArgExpr(Index: 0), BlockBytes: Call.getArgExpr(Index: 1)); |
1473 | State = MallocMemAux(C, Call, Size: TotalSize, Init, State, |
1474 | Family: AllocationFamily(AF_Malloc)); |
1475 | State = ProcessZeroAllocCheck(C, Call, IndexOfSizeArg: 0, State); |
1476 | State = ProcessZeroAllocCheck(C, Call, IndexOfSizeArg: 1, State); |
1477 | C.addTransition(State); |
1478 | } |
1479 | |
1480 | static bool isFromStdNamespace(const CallEvent &Call) { |
1481 | const Decl *FD = Call.getDecl(); |
1482 | assert(FD && "a CallDescription cannot match a call without a Decl" ); |
1483 | return FD->isInStdNamespace(); |
1484 | } |
1485 | |
1486 | void MallocChecker::preGetDelimOrGetLine(ProgramStateRef State, |
1487 | const CallEvent &Call, |
1488 | CheckerContext &C) const { |
1489 | // Discard calls to the C++ standard library function std::getline(), which |
1490 | // is completely unrelated to the POSIX getline() that we're checking. |
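// The POSIX interfaces being modeled here are
//   ssize_t getline(char **lineptr, size_t *n, FILE *stream);
//   ssize_t getdelim(char **lineptr, size_t *n, int delim, FILE *stream);
// both of which may reallocate (and thus release) the buffer in *lineptr.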
1491 | if (isFromStdNamespace(Call)) |
1492 | return; |
1493 | |
1494 | const auto LinePtr = getPointeeVal(PtrSVal: Call.getArgSVal(Index: 0), State); |
1495 | if (!LinePtr) |
1496 | return; |
1497 | |
1498 | // FreeMemAux takes IsKnownToBeAllocated as an output parameter, and it will |
1499 | // be true after the call if the symbol was registered by this checker. |
1500 | // We do not need this value here, as FreeMemAux will take care |
1501 | // of reporting any violation of the preconditions. |
1502 | bool IsKnownToBeAllocated = false; |
1503 | State = FreeMemAux(C, ArgExpr: Call.getArgExpr(Index: 0), Call, State, Hold: false, |
1504 | IsKnownToBeAllocated, Family: AllocationFamily(AF_Malloc), ReturnsNullOnFailure: false, |
1505 | ArgValOpt: LinePtr); |
1506 | if (State) |
1507 | C.addTransition(State); |
1508 | } |
1509 | |
1510 | void MallocChecker::checkGetDelimOrGetLine(ProgramStateRef State, |
1511 | const CallEvent &Call, |
1512 | CheckerContext &C) const { |
1513 | // Discard calls to the C++ standard library function std::getline(), which |
1514 | // is completely unrelated to the POSIX getline() that we're checking. |
1515 | if (isFromStdNamespace(Call)) |
1516 | return; |
1517 | |
1518 | // Handle the post-conditions of getline and getdelim: |
1519 | // Register the new conjured value as an allocated buffer. |
1520 | const CallExpr *CE = dyn_cast_or_null<CallExpr>(Val: Call.getOriginExpr()); |
1521 | if (!CE) |
1522 | return; |
1523 | |
1524 | const auto LinePtrOpt = getPointeeVal(PtrSVal: Call.getArgSVal(Index: 0), State); |
1525 | const auto SizeOpt = getPointeeVal(PtrSVal: Call.getArgSVal(Index: 1), State); |
1526 | if (!LinePtrOpt || !SizeOpt || LinePtrOpt->isUnknownOrUndef() || |
1527 | SizeOpt->isUnknownOrUndef()) |
1528 | return; |
1529 | |
1530 | const auto LinePtr = LinePtrOpt->getAs<DefinedSVal>(); |
1531 | const auto Size = SizeOpt->getAs<DefinedSVal>(); |
1532 | const MemRegion *LinePtrReg = LinePtr->getAsRegion(); |
1533 | if (!LinePtrReg) |
1534 | return; |
1535 | |
1536 | State = setDynamicExtent(State, MR: LinePtrReg, Extent: *Size); |
1537 | C.addTransition(State: MallocUpdateRefState(C, E: CE, State, |
1538 | Family: AllocationFamily(AF_Malloc), RetVal: *LinePtr)); |
1539 | } |
1540 | |
1541 | void MallocChecker::checkReallocN(ProgramStateRef State, const CallEvent &Call, |
1542 | CheckerContext &C) const { |
1543 | State = ReallocMemAux(C, Call, /*ShouldFreeOnFail=*/false, State, |
1544 | Family: AllocationFamily(AF_Malloc), |
1545 | /*SuffixWithN=*/true); |
1546 | State = ProcessZeroAllocCheck(C, Call, IndexOfSizeArg: 1, State); |
1547 | State = ProcessZeroAllocCheck(C, Call, IndexOfSizeArg: 2, State); |
1548 | C.addTransition(State); |
1549 | } |
1550 | |
1551 | void MallocChecker::checkOwnershipAttr(ProgramStateRef State, |
1552 | const CallEvent &Call, |
1553 | CheckerContext &C) const { |
1554 | const auto *CE = dyn_cast_or_null<CallExpr>(Val: Call.getOriginExpr()); |
1555 | if (!CE) |
1556 | return; |
1557 | const FunctionDecl *FD = C.getCalleeDecl(CE); |
1558 | if (!FD) |
1559 | return; |
1560 | if (ShouldIncludeOwnershipAnnotatedFunctions || |
1561 | ChecksEnabled[CK_MismatchedDeallocatorChecker]) { |
// Check all the ownership attributes, if there are any; a function may carry
// several of them.
1564 | if (FD->hasAttrs()) |
1565 | for (const auto *I : FD->specific_attrs<OwnershipAttr>()) { |
1566 | switch (I->getOwnKind()) { |
1567 | case OwnershipAttr::Returns: |
1568 | State = MallocMemReturnsAttr(C, Call, Att: I, State); |
1569 | break; |
1570 | case OwnershipAttr::Takes: |
1571 | case OwnershipAttr::Holds: |
1572 | State = FreeMemAttr(C, Call, Att: I, State); |
1573 | break; |
1574 | } |
1575 | } |
1576 | } |
1577 | C.addTransition(State); |
1578 | } |
1579 | |
1580 | bool MallocChecker::evalCall(const CallEvent &Call, CheckerContext &C) const { |
1581 | if (!Call.getOriginExpr()) |
1582 | return false; |
1583 | |
1584 | ProgramStateRef State = C.getState(); |
1585 | |
1586 | if (const CheckFn *Callback = FreeingMemFnMap.lookup(Call)) { |
1587 | (*Callback)(this, State, Call, C); |
1588 | return true; |
1589 | } |
1590 | |
1591 | if (const CheckFn *Callback = AllocatingMemFnMap.lookup(Call)) { |
1592 | State = MallocBindRetVal(C, Call, State, isAlloca: false); |
1593 | (*Callback)(this, State, Call, C); |
1594 | return true; |
1595 | } |
1596 | |
1597 | if (const CheckFn *Callback = ReallocatingMemFnMap.lookup(Call)) { |
1598 | State = MallocBindRetVal(C, Call, State, isAlloca: false); |
1599 | (*Callback)(this, State, Call, C); |
1600 | return true; |
1601 | } |
1602 | |
1603 | if (isStandardNew(Call)) { |
1604 | State = MallocBindRetVal(C, Call, State, isAlloca: false); |
1605 | checkCXXNewOrCXXDelete(State, Call, C); |
1606 | return true; |
1607 | } |
1608 | |
1609 | if (isStandardDelete(Call)) { |
1610 | checkCXXNewOrCXXDelete(State, Call, C); |
1611 | return true; |
1612 | } |
1613 | |
1614 | if (const CheckFn *Callback = AllocaMemFnMap.lookup(Call)) { |
1615 | State = MallocBindRetVal(C, Call, State, isAlloca: true); |
1616 | (*Callback)(this, State, Call, C); |
1617 | return true; |
1618 | } |
1619 | |
1620 | if (isFreeingOwnershipAttrCall(Call)) { |
1621 | checkOwnershipAttr(State, Call, C); |
1622 | return true; |
1623 | } |
1624 | |
1625 | if (isAllocatingOwnershipAttrCall(Call)) { |
1626 | State = MallocBindRetVal(C, Call, State, isAlloca: false); |
1627 | checkOwnershipAttr(State, Call, C); |
1628 | return true; |
1629 | } |
1630 | |
1631 | return false; |
1632 | } |
1633 | |
// Performs a check for zero-sized allocations.
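// For example, after 'p = malloc(0)' the symbol bound to 'p' is marked as
// allocated-of-size-zero; for 'realloc(q, 0)' it is recorded in
// ReallocSizeZeroSymbols instead (see below).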
1635 | ProgramStateRef MallocChecker::ProcessZeroAllocCheck( |
1636 | CheckerContext &C, const CallEvent &Call, const unsigned IndexOfSizeArg, |
1637 | ProgramStateRef State, std::optional<SVal> RetVal) { |
1638 | if (!State) |
1639 | return nullptr; |
1640 | |
1641 | const Expr *Arg = nullptr; |
1642 | |
1643 | if (const CallExpr *CE = dyn_cast<CallExpr>(Val: Call.getOriginExpr())) { |
1644 | Arg = CE->getArg(Arg: IndexOfSizeArg); |
1645 | } else if (const CXXNewExpr *NE = |
1646 | dyn_cast<CXXNewExpr>(Val: Call.getOriginExpr())) { |
1647 | if (NE->isArray()) { |
1648 | Arg = *NE->getArraySize(); |
1649 | } else { |
1650 | return State; |
1651 | } |
1652 | } else { |
1653 | assert(false && "not a CallExpr or CXXNewExpr" ); |
1654 | return nullptr; |
1655 | } |
1656 | |
1657 | if (!RetVal) |
1658 | RetVal = State->getSVal(Ex: Call.getOriginExpr(), LCtx: C.getLocationContext()); |
1659 | |
1660 | assert(Arg); |
1661 | |
1662 | auto DefArgVal = |
1663 | State->getSVal(Ex: Arg, LCtx: Call.getLocationContext()).getAs<DefinedSVal>(); |
1664 | |
1665 | if (!DefArgVal) |
1666 | return State; |
1667 | |
1668 | // Check if the allocation size is 0. |
1669 | ProgramStateRef TrueState, FalseState; |
1670 | SValBuilder &SvalBuilder = State->getStateManager().getSValBuilder(); |
1671 | DefinedSVal Zero = |
1672 | SvalBuilder.makeZeroVal(type: Arg->getType()).castAs<DefinedSVal>(); |
1673 | |
1674 | std::tie(args&: TrueState, args&: FalseState) = |
1675 | State->assume(Cond: SvalBuilder.evalEQ(state: State, lhs: *DefArgVal, rhs: Zero)); |
1676 | |
1677 | if (TrueState && !FalseState) { |
1678 | SymbolRef Sym = RetVal->getAsLocSymbol(); |
1679 | if (!Sym) |
1680 | return State; |
1681 | |
1682 | const RefState *RS = State->get<RegionState>(key: Sym); |
1683 | if (RS) { |
1684 | if (RS->isAllocated()) |
1685 | return TrueState->set<RegionState>( |
1686 | K: Sym, E: RefState::getAllocatedOfSizeZero(RS)); |
1687 | return State; |
1688 | } |
1689 | // Case of zero-size realloc. Historically 'realloc(ptr, 0)' is treated as |
1690 | // 'free(ptr)' and the returned value from 'realloc(ptr, 0)' is not |
1691 | // tracked. Add zero-reallocated Sym to the state to catch references |
1692 | // to zero-allocated memory. |
1693 | return TrueState->add<ReallocSizeZeroSymbols>(K: Sym); |
1694 | } |
1695 | |
1696 | // Assume the value is non-zero going forward. |
1697 | assert(FalseState); |
1698 | return FalseState; |
1699 | } |
1700 | |
1701 | static QualType getDeepPointeeType(QualType T) { |
1702 | QualType Result = T, PointeeType = T->getPointeeType(); |
1703 | while (!PointeeType.isNull()) { |
1704 | Result = PointeeType; |
1705 | PointeeType = PointeeType->getPointeeType(); |
1706 | } |
1707 | return Result; |
1708 | } |
1709 | |
1710 | /// \returns true if the constructor invoked by \p NE has an argument of a |
1711 | /// pointer/reference to a record type. |
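///
/// For example (an illustrative sketch):
/// \code
///   struct Node;
///   struct Wrapper { Wrapper(Node *n); };
///   new Wrapper(head); // 'head' may escape into the constructed object
/// \endcode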
1712 | static bool hasNonTrivialConstructorCall(const CXXNewExpr *NE) { |
1713 | |
1714 | const CXXConstructExpr *ConstructE = NE->getConstructExpr(); |
1715 | if (!ConstructE) |
1716 | return false; |
1717 | |
1718 | if (!NE->getAllocatedType()->getAsCXXRecordDecl()) |
1719 | return false; |
1720 | |
1721 | const CXXConstructorDecl *CtorD = ConstructE->getConstructor(); |
1722 | |
1723 | // Iterate over the constructor parameters. |
1724 | for (const auto *CtorParam : CtorD->parameters()) { |
1725 | |
1726 | QualType CtorParamPointeeT = CtorParam->getType()->getPointeeType(); |
1727 | if (CtorParamPointeeT.isNull()) |
1728 | continue; |
1729 | |
1730 | CtorParamPointeeT = getDeepPointeeType(T: CtorParamPointeeT); |
1731 | |
1732 | if (CtorParamPointeeT->getAsCXXRecordDecl()) |
1733 | return true; |
1734 | } |
1735 | |
1736 | return false; |
1737 | } |
1738 | |
1739 | ProgramStateRef |
1740 | MallocChecker::processNewAllocation(const CXXAllocatorCall &Call, |
1741 | CheckerContext &C, |
1742 | AllocationFamily Family) const { |
1743 | if (!isStandardNewDelete(FD: Call)) |
1744 | return nullptr; |
1745 | |
1746 | const CXXNewExpr *NE = Call.getOriginExpr(); |
1747 | const ParentMap &PM = C.getLocationContext()->getParentMap(); |
1748 | ProgramStateRef State = C.getState(); |
1749 | |
// Non-trivial constructors have a chance to escape 'this', but marking every
// constructor invocation as escaped would cut down on true positives too
// much, so only do it for constructors that take an argument of
// pointer-to-record type.
1754 | if (!PM.isConsumedExpr(E: NE) && hasNonTrivialConstructorCall(NE)) |
1755 | return State; |
1756 | |
// The return value from operator new is bound to a specified initialization
// value (if any) and we don't want to lose this value. So we call
// MallocUpdateRefState() instead of MallocMemAux(), which would break the
// existing binding.
1761 | SVal Target = Call.getObjectUnderConstruction(); |
1762 | if (Call.getOriginExpr()->isArray()) { |
1763 | if (auto SizeEx = NE->getArraySize()) |
1764 | checkTaintedness(C, Call, SizeSVal: C.getSVal(S: *SizeEx), State, |
1765 | Family: AllocationFamily(AF_CXXNewArray)); |
1766 | } |
1767 | |
1768 | State = MallocUpdateRefState(C, E: NE, State, Family, RetVal: Target); |
1769 | State = ProcessZeroAllocCheck(C, Call, IndexOfSizeArg: 0, State, RetVal: Target); |
1770 | return State; |
1771 | } |
1772 | |
1773 | void MallocChecker::checkNewAllocator(const CXXAllocatorCall &Call, |
1774 | CheckerContext &C) const { |
1775 | if (!C.wasInlined) { |
1776 | ProgramStateRef State = processNewAllocation( |
1777 | Call, C, |
1778 | Family: AllocationFamily(Call.getOriginExpr()->isArray() ? AF_CXXNewArray |
1779 | : AF_CXXNew)); |
1780 | C.addTransition(State); |
1781 | } |
1782 | } |
1783 | |
1784 | static bool isKnownDeallocObjCMethodName(const ObjCMethodCall &Call) { |
1785 | // If the first selector piece is one of the names below, assume that the |
1786 | // object takes ownership of the memory, promising to eventually deallocate it |
1787 | // with free(). |
1788 | // Ex: [NSData dataWithBytesNoCopy:bytes length:10]; |
1789 | // (...unless a 'freeWhenDone' parameter is false, but that's checked later.) |
1790 | StringRef FirstSlot = Call.getSelector().getNameForSlot(argIndex: 0); |
1791 | return FirstSlot == "dataWithBytesNoCopy" || |
1792 | FirstSlot == "initWithBytesNoCopy" || |
1793 | FirstSlot == "initWithCharactersNoCopy" ; |
1794 | } |
1795 | |
1796 | static std::optional<bool> getFreeWhenDoneArg(const ObjCMethodCall &Call) { |
1797 | Selector S = Call.getSelector(); |
1798 | |
1799 | // FIXME: We should not rely on fully-constrained symbols being folded. |
1800 | for (unsigned i = 1; i < S.getNumArgs(); ++i) |
1801 | if (S.getNameForSlot(argIndex: i) == "freeWhenDone" ) |
1802 | return !Call.getArgSVal(Index: i).isZeroConstant(); |
1803 | |
1804 | return std::nullopt; |
1805 | } |
1806 | |
1807 | void MallocChecker::checkPostObjCMessage(const ObjCMethodCall &Call, |
1808 | CheckerContext &C) const { |
1809 | if (C.wasInlined) |
1810 | return; |
1811 | |
1812 | if (!isKnownDeallocObjCMethodName(Call)) |
1813 | return; |
1814 | |
1815 | if (std::optional<bool> FreeWhenDone = getFreeWhenDoneArg(Call)) |
1816 | if (!*FreeWhenDone) |
1817 | return; |
1818 | |
1819 | if (Call.hasNonZeroCallbackArg()) |
1820 | return; |
1821 | |
1822 | bool IsKnownToBeAllocatedMemory; |
1823 | ProgramStateRef State = FreeMemAux(C, ArgExpr: Call.getArgExpr(Index: 0), Call, State: C.getState(), |
1824 | /*Hold=*/true, IsKnownToBeAllocated&: IsKnownToBeAllocatedMemory, |
1825 | Family: AllocationFamily(AF_Malloc), |
1826 | /*ReturnsNullOnFailure=*/true); |
1827 | |
1828 | C.addTransition(State); |
1829 | } |
1830 | |
1831 | ProgramStateRef |
1832 | MallocChecker::MallocMemReturnsAttr(CheckerContext &C, const CallEvent &Call, |
1833 | const OwnershipAttr *Att, |
1834 | ProgramStateRef State) const { |
1835 | if (!State) |
1836 | return nullptr; |
1837 | |
1838 | auto attrClassName = Att->getModule()->getName(); |
1839 | auto Family = AllocationFamily(AF_Custom, attrClassName); |
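// For example, given the (illustrative) declaration
//   void *pool_alloc(size_t n) __attribute__((ownership_returns(pool, 1)));
// 'attrClassName' is "pool", and the optional attribute argument names the
// size parameter used below.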
1840 | |
1841 | if (!Att->args().empty()) { |
1842 | return MallocMemAux(C, Call, |
1843 | SizeEx: Call.getArgExpr(Index: Att->args_begin()->getASTIndex()), |
1844 | Init: UnknownVal(), State, Family); |
1845 | } |
1846 | return MallocMemAux(C, Call, Size: UnknownVal(), Init: UnknownVal(), State, Family); |
1847 | } |
1848 | |
1849 | ProgramStateRef MallocChecker::MallocBindRetVal(CheckerContext &C, |
1850 | const CallEvent &Call, |
1851 | ProgramStateRef State, |
1852 | bool isAlloca) const { |
1853 | const Expr *CE = Call.getOriginExpr(); |
1854 | |
1855 | // We expect the allocation functions to return a pointer. |
1856 | if (!Loc::isLocType(T: CE->getType())) |
1857 | return nullptr; |
1858 | |
1859 | unsigned Count = C.blockCount(); |
1860 | SValBuilder &SVB = C.getSValBuilder(); |
1861 | const LocationContext *LCtx = C.getPredecessor()->getLocationContext(); |
1862 | DefinedSVal RetVal = |
1863 | isAlloca ? SVB.getAllocaRegionVal(E: CE, LCtx, Count) |
1864 | : SVB.getConjuredHeapSymbolVal(elem: Call.getCFGElementRef(), LCtx, |
1865 | type: CE->getType(), Count); |
1866 | return State->BindExpr(S: CE, LCtx: C.getLocationContext(), V: RetVal); |
1867 | } |
1868 | |
1869 | ProgramStateRef MallocChecker::MallocMemAux(CheckerContext &C, |
1870 | const CallEvent &Call, |
1871 | const Expr *SizeEx, SVal Init, |
1872 | ProgramStateRef State, |
1873 | AllocationFamily Family) const { |
1874 | if (!State) |
1875 | return nullptr; |
1876 | |
1877 | assert(SizeEx); |
1878 | return MallocMemAux(C, Call, Size: C.getSVal(S: SizeEx), Init, State, Family); |
1879 | } |
1880 | |
1881 | void MallocChecker::reportTaintBug(StringRef Msg, ProgramStateRef State, |
1882 | CheckerContext &C, |
1883 | llvm::ArrayRef<SymbolRef> TaintedSyms, |
1884 | AllocationFamily Family) const { |
1885 | if (ExplodedNode *N = C.generateNonFatalErrorNode(State, Tag: this)) { |
1886 | if (!BT_TaintedAlloc) |
1887 | BT_TaintedAlloc.reset(p: new BugType(CheckNames[CK_TaintedAllocChecker], |
1888 | "Tainted Memory Allocation" , |
1889 | categories::TaintedData)); |
1890 | auto R = std::make_unique<PathSensitiveBugReport>(args&: *BT_TaintedAlloc, args&: Msg, args&: N); |
1891 | for (const auto *TaintedSym : TaintedSyms) { |
1892 | R->markInteresting(sym: TaintedSym); |
1893 | } |
1894 | C.emitReport(R: std::move(R)); |
1895 | } |
1896 | } |
1897 | |
1898 | void MallocChecker::checkTaintedness(CheckerContext &C, const CallEvent &Call, |
1899 | const SVal SizeSVal, ProgramStateRef State, |
1900 | AllocationFamily Family) const { |
1901 | if (!ChecksEnabled[CK_TaintedAllocChecker]) |
1902 | return; |
1903 | std::vector<SymbolRef> TaintedSyms = |
1904 | taint::getTaintedSymbols(State, V: SizeSVal); |
1905 | if (TaintedSyms.empty()) |
1906 | return; |
1907 | |
1908 | SValBuilder &SVB = C.getSValBuilder(); |
1909 | QualType SizeTy = SVB.getContext().getSizeType(); |
1910 | QualType CmpTy = SVB.getConditionType(); |
// If the size symbol is tainted, we warn when the size may be larger than
// SIZE_MAX/4.
1913 | BasicValueFactory &BVF = SVB.getBasicValueFactory(); |
1914 | const llvm::APSInt MaxValInt = BVF.getMaxValue(T: SizeTy); |
1915 | NonLoc MaxLength = |
1916 | SVB.makeIntVal(integer: MaxValInt / APSIntType(MaxValInt).getValue(RawValue: 4)); |
1917 | std::optional<NonLoc> SizeNL = SizeSVal.getAs<NonLoc>(); |
1918 | auto Cmp = SVB.evalBinOpNN(state: State, op: BO_GE, lhs: *SizeNL, rhs: MaxLength, resultTy: CmpTy) |
1919 | .getAs<DefinedOrUnknownSVal>(); |
1920 | if (!Cmp) |
1921 | return; |
1922 | auto [StateTooLarge, StateNotTooLarge] = State->assume(Cond: *Cmp); |
1923 | if (!StateTooLarge && StateNotTooLarge) { |
1924 | // We can prove that size is not too large so there is no issue. |
1925 | return; |
1926 | } |
1927 | |
1928 | std::string Callee = "Memory allocation function" ; |
1929 | if (Call.getCalleeIdentifier()) |
1930 | Callee = Call.getCalleeIdentifier()->getName().str(); |
1931 | reportTaintBug( |
1932 | Msg: Callee + " is called with a tainted (potentially attacker controlled) " |
1933 | "value. Make sure the value is bound checked." , |
1934 | State, C, TaintedSyms, Family); |
1935 | } |
1936 | |
1937 | ProgramStateRef MallocChecker::MallocMemAux(CheckerContext &C, |
1938 | const CallEvent &Call, SVal Size, |
1939 | SVal Init, ProgramStateRef State, |
1940 | AllocationFamily Family) const { |
1941 | if (!State) |
1942 | return nullptr; |
1943 | |
1944 | const Expr *CE = Call.getOriginExpr(); |
1945 | |
// We expect the malloc functions to return a pointer.
// This should have been checked already.
1948 | assert(Loc::isLocType(CE->getType()) && |
1949 | "Allocation functions must return a pointer" ); |
1950 | |
1951 | const LocationContext *LCtx = C.getPredecessor()->getLocationContext(); |
1952 | SVal RetVal = State->getSVal(Ex: CE, LCtx: C.getLocationContext()); |
1953 | |
1954 | // Fill the region with the initialization value. |
1955 | State = State->bindDefaultInitial(loc: RetVal, V: Init, LCtx); |
1956 | |
1957 | // If Size is somehow undefined at this point, this line prevents a crash. |
1958 | if (Size.isUndef()) |
1959 | Size = UnknownVal(); |
1960 | |
1961 | checkTaintedness(C, Call, SizeSVal: Size, State, Family: AllocationFamily(AF_Malloc)); |
1962 | |
1963 | // Set the region's extent. |
1964 | State = setDynamicExtent(State, MR: RetVal.getAsRegion(), |
1965 | Extent: Size.castAs<DefinedOrUnknownSVal>()); |
1966 | |
1967 | return MallocUpdateRefState(C, E: CE, State, Family); |
1968 | } |
1969 | |
1970 | static ProgramStateRef MallocUpdateRefState(CheckerContext &C, const Expr *E, |
1971 | ProgramStateRef State, |
1972 | AllocationFamily Family, |
1973 | std::optional<SVal> RetVal) { |
1974 | if (!State) |
1975 | return nullptr; |
1976 | |
1977 | // Get the return value. |
1978 | if (!RetVal) |
1979 | RetVal = State->getSVal(Ex: E, LCtx: C.getLocationContext()); |
1980 | |
1981 | // We expect the malloc functions to return a pointer. |
1982 | if (!RetVal->getAs<Loc>()) |
1983 | return nullptr; |
1984 | |
1985 | SymbolRef Sym = RetVal->getAsLocSymbol(); |
1986 | |
1987 | // NOTE: If this was an `alloca()` call, then `RetVal` holds an |
// `AllocaRegion`, so `Sym` will be a null pointer because `AllocaRegion`s do
1989 | // not have an associated symbol. However, this distinct region type means |
1990 | // that we don't need to store anything about them in `RegionState`. |
1991 | |
1992 | if (Sym) |
1993 | return State->set<RegionState>(K: Sym, E: RefState::getAllocated(family: Family, s: E)); |
1994 | |
1995 | return State; |
1996 | } |
1997 | |
1998 | ProgramStateRef MallocChecker::FreeMemAttr(CheckerContext &C, |
1999 | const CallEvent &Call, |
2000 | const OwnershipAttr *Att, |
2001 | ProgramStateRef State) const { |
2002 | if (!State) |
2003 | return nullptr; |
2004 | |
2005 | auto attrClassName = Att->getModule()->getName(); |
2006 | auto Family = AllocationFamily(AF_Custom, attrClassName); |
2007 | |
2008 | bool IsKnownToBeAllocated = false; |
2009 | |
2010 | for (const auto &Arg : Att->args()) { |
2011 | ProgramStateRef StateI = |
2012 | FreeMemAux(C, Call, State, Num: Arg.getASTIndex(), |
2013 | Hold: Att->getOwnKind() == OwnershipAttr::Holds, |
2014 | IsKnownToBeAllocated, Family); |
2015 | if (StateI) |
2016 | State = StateI; |
2017 | } |
2018 | return State; |
2019 | } |
2020 | |
2021 | ProgramStateRef MallocChecker::FreeMemAux(CheckerContext &C, |
2022 | const CallEvent &Call, |
2023 | ProgramStateRef State, unsigned Num, |
2024 | bool Hold, bool &IsKnownToBeAllocated, |
2025 | AllocationFamily Family, |
2026 | bool ReturnsNullOnFailure) const { |
2027 | if (!State) |
2028 | return nullptr; |
2029 | |
2030 | if (Call.getNumArgs() < (Num + 1)) |
2031 | return nullptr; |
2032 | |
2033 | return FreeMemAux(C, ArgExpr: Call.getArgExpr(Index: Num), Call, State, Hold, |
2034 | IsKnownToBeAllocated, Family, ReturnsNullOnFailure); |
2035 | } |
2036 | |
2037 | /// Checks if the previous call to free on the given symbol failed - if free |
2038 | /// failed, returns true. Also, returns the corresponding return value symbol. |
2039 | static bool didPreviousFreeFail(ProgramStateRef State, |
2040 | SymbolRef Sym, SymbolRef &RetStatusSymbol) { |
2041 | const SymbolRef *Ret = State->get<FreeReturnValue>(key: Sym); |
2042 | if (Ret) { |
2043 | assert(*Ret && "We should not store the null return symbol" ); |
2044 | ConstraintManager &CMgr = State->getConstraintManager(); |
2045 | ConditionTruthVal FreeFailed = CMgr.isNull(State, Sym: *Ret); |
2046 | RetStatusSymbol = *Ret; |
2047 | return FreeFailed.isConstrainedTrue(); |
2048 | } |
2049 | return false; |
2050 | } |
2051 | |
2052 | static void printOwnershipTakesList(raw_ostream &os, CheckerContext &C, |
2053 | const Expr *E) { |
2054 | const CallExpr *CE = dyn_cast<CallExpr>(Val: E); |
2055 | |
2056 | if (!CE) |
2057 | return; |
2058 | |
2059 | const FunctionDecl *FD = CE->getDirectCallee(); |
2060 | if (!FD) |
2061 | return; |
2062 | |
2063 | // Only one ownership_takes attribute is allowed. |
2064 | for (const auto *I : FD->specific_attrs<OwnershipAttr>()) { |
2065 | if (I->getOwnKind() != OwnershipAttr::Takes) |
2066 | continue; |
2067 | |
2068 | os << ", which takes ownership of '" << I->getModule()->getName() << '\''; |
2069 | break; |
2070 | } |
2071 | } |
2072 | |
2073 | static bool printMemFnName(raw_ostream &os, CheckerContext &C, const Expr *E) { |
2074 | if (const CallExpr *CE = dyn_cast<CallExpr>(Val: E)) { |
2075 | // FIXME: This doesn't handle indirect calls. |
2076 | const FunctionDecl *FD = CE->getDirectCallee(); |
2077 | if (!FD) |
2078 | return false; |
2079 | |
2080 | os << '\'' << *FD; |
2081 | |
2082 | if (!FD->isOverloadedOperator()) |
2083 | os << "()" ; |
2084 | |
2085 | os << '\''; |
2086 | return true; |
2087 | } |
2088 | |
2089 | if (const ObjCMessageExpr *Msg = dyn_cast<ObjCMessageExpr>(Val: E)) { |
2090 | if (Msg->isInstanceMessage()) |
2091 | os << "-" ; |
2092 | else |
2093 | os << "+" ; |
2094 | Msg->getSelector().print(OS&: os); |
2095 | return true; |
2096 | } |
2097 | |
2098 | if (const CXXNewExpr *NE = dyn_cast<CXXNewExpr>(Val: E)) { |
2099 | os << "'" |
2100 | << getOperatorSpelling(Operator: NE->getOperatorNew()->getOverloadedOperator()) |
2101 | << "'" ; |
2102 | return true; |
2103 | } |
2104 | |
2105 | if (const CXXDeleteExpr *DE = dyn_cast<CXXDeleteExpr>(Val: E)) { |
2106 | os << "'" |
2107 | << getOperatorSpelling(Operator: DE->getOperatorDelete()->getOverloadedOperator()) |
2108 | << "'" ; |
2109 | return true; |
2110 | } |
2111 | |
2112 | return false; |
2113 | } |
2114 | |
2115 | static void printExpectedAllocName(raw_ostream &os, AllocationFamily Family) { |
2116 | |
2117 | switch (Family.Kind) { |
2118 | case AF_Malloc: |
2119 | os << "'malloc()'" ; |
2120 | return; |
2121 | case AF_CXXNew: |
2122 | os << "'new'" ; |
2123 | return; |
2124 | case AF_CXXNewArray: |
2125 | os << "'new[]'" ; |
2126 | return; |
2127 | case AF_IfNameIndex: |
2128 | os << "'if_nameindex()'" ; |
2129 | return; |
2130 | case AF_InnerBuffer: |
2131 | os << "container-specific allocator" ; |
2132 | return; |
2133 | case AF_Custom: |
2134 | os << Family.CustomName.value(); |
2135 | return; |
2136 | case AF_Alloca: |
2137 | case AF_None: |
2138 | assert(false && "not a deallocation expression" ); |
2139 | } |
2140 | } |
2141 | |
2142 | static void printExpectedDeallocName(raw_ostream &os, AllocationFamily Family) { |
2143 | switch (Family.Kind) { |
2144 | case AF_Malloc: |
2145 | os << "'free()'" ; |
2146 | return; |
2147 | case AF_CXXNew: |
2148 | os << "'delete'" ; |
2149 | return; |
2150 | case AF_CXXNewArray: |
2151 | os << "'delete[]'" ; |
2152 | return; |
2153 | case AF_IfNameIndex: |
2154 | os << "'if_freenameindex()'" ; |
2155 | return; |
2156 | case AF_InnerBuffer: |
2157 | os << "container-specific deallocator" ; |
2158 | return; |
2159 | case AF_Custom: |
2160 | os << "function that takes ownership of '" << Family.CustomName.value() |
2161 | << "\'" ; |
2162 | return; |
2163 | case AF_Alloca: |
2164 | case AF_None: |
2165 | assert(false && "not a deallocation expression" ); |
2166 | } |
2167 | } |
2168 | |
2169 | ProgramStateRef |
2170 | MallocChecker::FreeMemAux(CheckerContext &C, const Expr *ArgExpr, |
2171 | const CallEvent &Call, ProgramStateRef State, |
2172 | bool Hold, bool &IsKnownToBeAllocated, |
2173 | AllocationFamily Family, bool ReturnsNullOnFailure, |
2174 | std::optional<SVal> ArgValOpt) const { |
2175 | |
2176 | if (!State) |
2177 | return nullptr; |
2178 | |
2179 | SVal ArgVal = ArgValOpt.value_or(u: C.getSVal(S: ArgExpr)); |
2180 | if (!isa<DefinedOrUnknownSVal>(Val: ArgVal)) |
2181 | return nullptr; |
2182 | DefinedOrUnknownSVal location = ArgVal.castAs<DefinedOrUnknownSVal>(); |
2183 | |
2184 | // Check for null dereferences. |
2185 | if (!isa<Loc>(Val: location)) |
2186 | return nullptr; |
2187 | |
2188 | // The explicit NULL case, no operation is performed. |
2189 | ProgramStateRef notNullState, nullState; |
2190 | std::tie(args&: notNullState, args&: nullState) = State->assume(Cond: location); |
2191 | if (nullState && !notNullState) |
2192 | return nullptr; |
2193 | |
2194 | // Unknown values could easily be okay |
2195 | // Undefined values are handled elsewhere |
2196 | if (ArgVal.isUnknownOrUndef()) |
2197 | return nullptr; |
2198 | |
2199 | const MemRegion *R = ArgVal.getAsRegion(); |
2200 | const Expr *ParentExpr = Call.getOriginExpr(); |
2201 | |
2202 | // NOTE: We detected a bug, but the checker under whose name we would emit the |
2203 | // error could be disabled. Generally speaking, the MallocChecker family is an |
2204 | // integral part of the Static Analyzer, and disabling any part of it should |
2205 | // only be done under exceptional circumstances, such as frequent false |
2206 | // positives. If this is the case, we can reasonably believe that there are |
2207 | // serious faults in our understanding of the source code, and even if we |
// don't emit a warning, we should terminate further analysis with a sink
2209 | // node. |
2210 | |
2211 | // Nonlocs can't be freed, of course. |
2212 | // Non-region locations (labels and fixed addresses) also shouldn't be freed. |
2213 | if (!R) { |
2214 | // Exception: |
// If the macro ZERO_SIZE_PTR is defined, this could be kernel source code.
// In that case, ZERO_SIZE_PTR is a special value used for a
2217 | // zero-sized memory block which is allowed to be freed, despite not being a |
2218 | // null pointer. |
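// (In the Linux kernel, for instance, ZERO_SIZE_PTR is ((void *)16) and is
// what kmalloc(0) returns; kfree() accepts it.)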
2219 | if (Family.Kind != AF_Malloc || !isArgZERO_SIZE_PTR(State, C, ArgVal)) |
2220 | HandleNonHeapDealloc(C, ArgVal, Range: ArgExpr->getSourceRange(), DeallocExpr: ParentExpr, |
2221 | Family); |
2222 | return nullptr; |
2223 | } |
2224 | |
2225 | R = R->StripCasts(); |
2226 | |
2227 | // Blocks might show up as heap data, but should not be free()d |
2228 | if (isa<BlockDataRegion>(Val: R)) { |
2229 | HandleNonHeapDealloc(C, ArgVal, Range: ArgExpr->getSourceRange(), DeallocExpr: ParentExpr, |
2230 | Family); |
2231 | return nullptr; |
2232 | } |
2233 | |
2234 | // Parameters, locals, statics, globals, and memory returned by |
2235 | // __builtin_alloca() shouldn't be freed. |
2236 | if (!R->hasMemorySpace<UnknownSpaceRegion, HeapSpaceRegion>(State)) { |
2237 | // Regions returned by malloc() are represented by SymbolicRegion objects |
2238 | // within HeapSpaceRegion. Of course, free() can work on memory allocated |
2239 | // outside the current function, so UnknownSpaceRegion is also a |
2240 | // possibility here. |
2241 | |
2242 | if (isa<AllocaRegion>(Val: R)) |
2243 | HandleFreeAlloca(C, ArgVal, Range: ArgExpr->getSourceRange()); |
2244 | else |
2245 | HandleNonHeapDealloc(C, ArgVal, Range: ArgExpr->getSourceRange(), DeallocExpr: ParentExpr, |
2246 | Family); |
2247 | |
2248 | return nullptr; |
2249 | } |
2250 | |
2251 | const SymbolicRegion *SrBase = dyn_cast<SymbolicRegion>(Val: R->getBaseRegion()); |
2252 | // Various cases could lead to non-symbol values here. |
2253 | // For now, ignore them. |
2254 | if (!SrBase) |
2255 | return nullptr; |
2256 | |
2257 | SymbolRef SymBase = SrBase->getSymbol(); |
2258 | const RefState *RsBase = State->get<RegionState>(key: SymBase); |
2259 | SymbolRef PreviousRetStatusSymbol = nullptr; |
2260 | |
2261 | IsKnownToBeAllocated = |
2262 | RsBase && (RsBase->isAllocated() || RsBase->isAllocatedOfSizeZero()); |
2263 | |
2264 | if (RsBase) { |
2265 | |
2266 | // Memory returned by alloca() shouldn't be freed. |
2267 | if (RsBase->getAllocationFamily().Kind == AF_Alloca) { |
2268 | HandleFreeAlloca(C, ArgVal, Range: ArgExpr->getSourceRange()); |
2269 | return nullptr; |
2270 | } |
2271 | |
2272 | // Check for double free first. |
2273 | if ((RsBase->isReleased() || RsBase->isRelinquished()) && |
2274 | !didPreviousFreeFail(State, Sym: SymBase, RetStatusSymbol&: PreviousRetStatusSymbol)) { |
2275 | HandleDoubleFree(C, Range: ParentExpr->getSourceRange(), Released: RsBase->isReleased(), |
2276 | Sym: SymBase, PrevSym: PreviousRetStatusSymbol); |
2277 | return nullptr; |
2278 | } |
2279 | |
2280 | // If the pointer is allocated or escaped, but we are now trying to free it, |
2281 | // check that the call to free is proper. |
2282 | if (RsBase->isAllocated() || RsBase->isAllocatedOfSizeZero() || |
2283 | RsBase->isEscaped()) { |
2284 | |
2285 | // Check if an expected deallocation function matches the real one. |
2286 | bool DeallocMatchesAlloc = RsBase->getAllocationFamily() == Family; |
2287 | if (!DeallocMatchesAlloc) { |
2288 | HandleMismatchedDealloc(C, Range: ArgExpr->getSourceRange(), DeallocExpr: ParentExpr, |
2289 | RS: RsBase, Sym: SymBase, OwnershipTransferred: Hold); |
2290 | return nullptr; |
2291 | } |
2292 | |
2293 | // Check if the memory location being freed is the actual location |
2294 | // allocated, or an offset. |
2295 | RegionOffset Offset = R->getAsOffset(); |
2296 | if (Offset.isValid() && |
2297 | !Offset.hasSymbolicOffset() && |
2298 | Offset.getOffset() != 0) { |
2299 | const Expr *AllocExpr = cast<Expr>(Val: RsBase->getStmt()); |
2300 | HandleOffsetFree(C, ArgVal, Range: ArgExpr->getSourceRange(), DeallocExpr: ParentExpr, |
2301 | Family, AllocExpr); |
2302 | return nullptr; |
2303 | } |
2304 | } |
2305 | } |
2306 | |
2307 | if (SymBase->getType()->isFunctionPointerType()) { |
2308 | HandleFunctionPtrFree(C, ArgVal, Range: ArgExpr->getSourceRange(), FreeExpr: ParentExpr, |
2309 | Family); |
2310 | return nullptr; |
2311 | } |
2312 | |
// Clean out the info about the previous free call's return value.
2314 | State = State->remove<FreeReturnValue>(K: SymBase); |
2315 | |
2316 | // Keep track of the return value. If it is NULL, we will know that free |
2317 | // failed. |
2318 | if (ReturnsNullOnFailure) { |
2319 | SVal RetVal = C.getSVal(S: ParentExpr); |
2320 | SymbolRef RetStatusSymbol = RetVal.getAsSymbol(); |
2321 | if (RetStatusSymbol) { |
2322 | C.getSymbolManager().addSymbolDependency(Primary: SymBase, Dependent: RetStatusSymbol); |
2323 | State = State->set<FreeReturnValue>(K: SymBase, E: RetStatusSymbol); |
2324 | } |
2325 | } |
2326 | |
// If we don't know anything about this symbol, a free on it may be totally
// valid. If this is the case, let's assume that the allocation family of the
// freeing function is the same as the symbol's allocation family, and go with
// that.
2331 | assert(!RsBase || (RsBase && RsBase->getAllocationFamily() == Family)); |
2332 | |
// Assume that after memory is freed, it contains unknown values. This
// conforms to the language standards, since reading from freed memory is
// undefined behavior and may yield an arbitrary value.
2336 | State = State->invalidateRegions(Values: {location}, Elem: Call.getCFGElementRef(), |
2337 | BlockCount: C.blockCount(), LCtx: C.getLocationContext(), |
2338 | /*CausesPointerEscape=*/false, |
2339 | /*InvalidatedSymbols=*/IS: nullptr); |
2340 | |
2341 | // Normal free. |
2342 | if (Hold) |
2343 | return State->set<RegionState>(K: SymBase, |
2344 | E: RefState::getRelinquished(family: Family, |
2345 | s: ParentExpr)); |
2346 | |
2347 | return State->set<RegionState>(K: SymBase, |
2348 | E: RefState::getReleased(family: Family, s: ParentExpr)); |
2349 | } |
2350 | |
2351 | std::optional<MallocChecker::CheckKind> |
2352 | MallocChecker::getCheckIfTracked(AllocationFamily Family, |
2353 | bool IsALeakCheck) const { |
2354 | switch (Family.Kind) { |
2355 | case AF_Malloc: |
2356 | case AF_Alloca: |
2357 | case AF_Custom: |
2358 | case AF_IfNameIndex: { |
2359 | if (ChecksEnabled[CK_MallocChecker]) |
2360 | return CK_MallocChecker; |
2361 | return std::nullopt; |
2362 | } |
2363 | case AF_CXXNew: |
2364 | case AF_CXXNewArray: { |
2365 | if (IsALeakCheck) { |
2366 | if (ChecksEnabled[CK_NewDeleteLeaksChecker]) |
2367 | return CK_NewDeleteLeaksChecker; |
2368 | } |
2369 | else { |
2370 | if (ChecksEnabled[CK_NewDeleteChecker]) |
2371 | return CK_NewDeleteChecker; |
2372 | } |
2373 | return std::nullopt; |
2374 | } |
2375 | case AF_InnerBuffer: { |
2376 | if (ChecksEnabled[CK_InnerPointerChecker]) |
2377 | return CK_InnerPointerChecker; |
2378 | return std::nullopt; |
2379 | } |
2380 | case AF_None: { |
2381 | assert(false && "no family" ); |
2382 | return std::nullopt; |
2383 | } |
2384 | } |
2385 | assert(false && "unhandled family" ); |
2386 | return std::nullopt; |
2387 | } |
2388 | |
2389 | std::optional<MallocChecker::CheckKind> |
2390 | MallocChecker::getCheckIfTracked(CheckerContext &C, SymbolRef Sym, |
2391 | bool IsALeakCheck) const { |
2392 | if (C.getState()->contains<ReallocSizeZeroSymbols>(key: Sym)) |
2393 | return CK_MallocChecker; |
2394 | |
2395 | const RefState *RS = C.getState()->get<RegionState>(key: Sym); |
2396 | assert(RS); |
2397 | return getCheckIfTracked(Family: RS->getAllocationFamily(), IsALeakCheck); |
2398 | } |
2399 | |
2400 | bool MallocChecker::SummarizeValue(raw_ostream &os, SVal V) { |
2401 | if (std::optional<nonloc::ConcreteInt> IntVal = |
2402 | V.getAs<nonloc::ConcreteInt>()) |
2403 | os << "an integer (" << IntVal->getValue() << ")" ; |
2404 | else if (std::optional<loc::ConcreteInt> ConstAddr = |
2405 | V.getAs<loc::ConcreteInt>()) |
2406 | os << "a constant address (" << ConstAddr->getValue() << ")" ; |
2407 | else if (std::optional<loc::GotoLabel> Label = V.getAs<loc::GotoLabel>()) |
2408 | os << "the address of the label '" << Label->getLabel()->getName() << "'" ; |
2409 | else |
2410 | return false; |
2411 | |
2412 | return true; |
2413 | } |
2414 | |
2415 | bool MallocChecker::SummarizeRegion(ProgramStateRef State, raw_ostream &os, |
2416 | const MemRegion *MR) { |
2417 | switch (MR->getKind()) { |
2418 | case MemRegion::FunctionCodeRegionKind: { |
2419 | const NamedDecl *FD = cast<FunctionCodeRegion>(Val: MR)->getDecl(); |
2420 | if (FD) |
2421 | os << "the address of the function '" << *FD << '\''; |
2422 | else |
2423 | os << "the address of a function" ; |
2424 | return true; |
2425 | } |
2426 | case MemRegion::BlockCodeRegionKind: |
2427 | os << "block text" ; |
2428 | return true; |
2429 | case MemRegion::BlockDataRegionKind: |
// FIXME: where did the block come from?
2431 | os << "a block" ; |
2432 | return true; |
2433 | default: { |
2434 | const MemSpaceRegion *MS = MR->getMemorySpace(State); |
2435 | |
2436 | if (isa<StackLocalsSpaceRegion>(Val: MS)) { |
2437 | const VarRegion *VR = dyn_cast<VarRegion>(Val: MR); |
2438 | const VarDecl *VD; |
2439 | if (VR) |
2440 | VD = VR->getDecl(); |
2441 | else |
2442 | VD = nullptr; |
2443 | |
2444 | if (VD) |
2445 | os << "the address of the local variable '" << VD->getName() << "'" ; |
2446 | else |
2447 | os << "the address of a local stack variable" ; |
2448 | return true; |
2449 | } |
2450 | |
2451 | if (isa<StackArgumentsSpaceRegion>(Val: MS)) { |
2452 | const VarRegion *VR = dyn_cast<VarRegion>(Val: MR); |
2453 | const VarDecl *VD; |
2454 | if (VR) |
2455 | VD = VR->getDecl(); |
2456 | else |
2457 | VD = nullptr; |
2458 | |
2459 | if (VD) |
2460 | os << "the address of the parameter '" << VD->getName() << "'" ; |
2461 | else |
2462 | os << "the address of a parameter" ; |
2463 | return true; |
2464 | } |
2465 | |
2466 | if (isa<GlobalsSpaceRegion>(Val: MS)) { |
2467 | const VarRegion *VR = dyn_cast<VarRegion>(Val: MR); |
2468 | const VarDecl *VD; |
2469 | if (VR) |
2470 | VD = VR->getDecl(); |
2471 | else |
2472 | VD = nullptr; |
2473 | |
2474 | if (VD) { |
2475 | if (VD->isStaticLocal()) |
2476 | os << "the address of the static variable '" << VD->getName() << "'" ; |
2477 | else |
2478 | os << "the address of the global variable '" << VD->getName() << "'" ; |
2479 | } else |
2480 | os << "the address of a global variable" ; |
2481 | return true; |
2482 | } |
2483 | |
2484 | return false; |
2485 | } |
2486 | } |
2487 | } |
2488 | |
2489 | void MallocChecker::HandleNonHeapDealloc(CheckerContext &C, SVal ArgVal, |
2490 | SourceRange Range, |
2491 | const Expr *DeallocExpr, |
2492 | AllocationFamily Family) const { |
2493 | |
2494 | if (!ChecksEnabled[CK_MallocChecker] && !ChecksEnabled[CK_NewDeleteChecker]) { |
2495 | C.addSink(); |
2496 | return; |
2497 | } |
2498 | |
2499 | std::optional<MallocChecker::CheckKind> CheckKind = getCheckIfTracked(Family); |
2500 | if (!CheckKind) |
2501 | return; |
2502 | |
2503 | if (ExplodedNode *N = C.generateErrorNode()) { |
2504 | if (!BT_BadFree[*CheckKind]) |
2505 | BT_BadFree[*CheckKind].reset(p: new BugType( |
2506 | CheckNames[*CheckKind], "Bad free" , categories::MemoryError)); |
2507 | |
2508 | SmallString<100> buf; |
2509 | llvm::raw_svector_ostream os(buf); |
2510 | |
2511 | const MemRegion *MR = ArgVal.getAsRegion(); |
2512 | while (const ElementRegion *ER = dyn_cast_or_null<ElementRegion>(Val: MR)) |
2513 | MR = ER->getSuperRegion(); |
2514 | |
2515 | os << "Argument to " ; |
2516 | if (!printMemFnName(os, C, E: DeallocExpr)) |
2517 | os << "deallocator" ; |
2518 | |
2519 | os << " is " ; |
2520 | bool Summarized = |
2521 | MR ? SummarizeRegion(State: C.getState(), os, MR) : SummarizeValue(os, V: ArgVal); |
2522 | if (Summarized) |
2523 | os << ", which is not memory allocated by " ; |
2524 | else |
2525 | os << "not memory allocated by " ; |
2526 | |
2527 | printExpectedAllocName(os, Family); |
2528 | |
2529 | auto R = std::make_unique<PathSensitiveBugReport>(args&: *BT_BadFree[*CheckKind], |
2530 | args: os.str(), args&: N); |
2531 | R->markInteresting(R: MR); |
2532 | R->addRange(R: Range); |
2533 | C.emitReport(R: std::move(R)); |
2534 | } |
2535 | } |
2536 | |
2537 | void MallocChecker::HandleFreeAlloca(CheckerContext &C, SVal ArgVal, |
2538 | SourceRange Range) const { |
2539 | |
2540 | std::optional<MallocChecker::CheckKind> CheckKind; |
2541 | |
2542 | if (ChecksEnabled[CK_MallocChecker]) |
2543 | CheckKind = CK_MallocChecker; |
2544 | else if (ChecksEnabled[CK_MismatchedDeallocatorChecker]) |
2545 | CheckKind = CK_MismatchedDeallocatorChecker; |
2546 | else { |
2547 | C.addSink(); |
2548 | return; |
2549 | } |
2550 | |
2551 | if (ExplodedNode *N = C.generateErrorNode()) { |
2552 | if (!BT_FreeAlloca[*CheckKind]) |
2553 | BT_FreeAlloca[*CheckKind].reset(p: new BugType( |
2554 | CheckNames[*CheckKind], "Free 'alloca()'" , categories::MemoryError)); |
2555 | |
2556 | auto R = std::make_unique<PathSensitiveBugReport>( |
2557 | args&: *BT_FreeAlloca[*CheckKind], |
2558 | args: "Memory allocated by 'alloca()' should not be deallocated" , args&: N); |
2559 | R->markInteresting(R: ArgVal.getAsRegion()); |
2560 | R->addRange(R: Range); |
2561 | C.emitReport(R: std::move(R)); |
2562 | } |
2563 | } |
2564 | |
2565 | void MallocChecker::HandleMismatchedDealloc(CheckerContext &C, |
2566 | SourceRange Range, |
2567 | const Expr *DeallocExpr, |
2568 | const RefState *RS, SymbolRef Sym, |
2569 | bool OwnershipTransferred) const { |
2570 | |
2571 | if (!ChecksEnabled[CK_MismatchedDeallocatorChecker]) { |
2572 | C.addSink(); |
2573 | return; |
2574 | } |
2575 | |
2576 | if (ExplodedNode *N = C.generateErrorNode()) { |
2577 | if (!BT_MismatchedDealloc) |
2578 | BT_MismatchedDealloc.reset( |
2579 | p: new BugType(CheckNames[CK_MismatchedDeallocatorChecker], |
2580 | "Bad deallocator" , categories::MemoryError)); |
2581 | |
2582 | SmallString<100> buf; |
2583 | llvm::raw_svector_ostream os(buf); |
2584 | |
2585 | const Expr *AllocExpr = cast<Expr>(Val: RS->getStmt()); |
2586 | SmallString<20> AllocBuf; |
2587 | llvm::raw_svector_ostream AllocOs(AllocBuf); |
2588 | SmallString<20> DeallocBuf; |
2589 | llvm::raw_svector_ostream DeallocOs(DeallocBuf); |
2590 | |
2591 | if (OwnershipTransferred) { |
2592 | if (printMemFnName(os&: DeallocOs, C, E: DeallocExpr)) |
2593 | os << DeallocOs.str() << " cannot" ; |
2594 | else |
2595 | os << "Cannot" ; |
2596 | |
2597 | os << " take ownership of memory" ; |
2598 | |
2599 | if (printMemFnName(os&: AllocOs, C, E: AllocExpr)) |
2600 | os << " allocated by " << AllocOs.str(); |
2601 | } else { |
2602 | os << "Memory" ; |
2603 | if (printMemFnName(os&: AllocOs, C, E: AllocExpr)) |
2604 | os << " allocated by " << AllocOs.str(); |
2605 | |
2606 | os << " should be deallocated by " ; |
2607 | printExpectedDeallocName(os, Family: RS->getAllocationFamily()); |
2608 | |
2609 | if (printMemFnName(os&: DeallocOs, C, E: DeallocExpr)) |
2610 | os << ", not " << DeallocOs.str(); |
2611 | |
2612 | printOwnershipTakesList(os, C, E: DeallocExpr); |
2613 | } |
2614 | |
2615 | auto R = std::make_unique<PathSensitiveBugReport>(args&: *BT_MismatchedDealloc, |
2616 | args: os.str(), args&: N); |
2617 | R->markInteresting(sym: Sym); |
2618 | R->addRange(R: Range); |
2619 | R->addVisitor<MallocBugVisitor>(ConstructorArgs&: Sym); |
2620 | C.emitReport(R: std::move(R)); |
2621 | } |
2622 | } |
2623 | |
2624 | void MallocChecker::HandleOffsetFree(CheckerContext &C, SVal ArgVal, |
2625 | SourceRange Range, const Expr *DeallocExpr, |
2626 | AllocationFamily Family, |
2627 | const Expr *AllocExpr) const { |
2628 | |
2629 | if (!ChecksEnabled[CK_MallocChecker] && !ChecksEnabled[CK_NewDeleteChecker]) { |
2630 | C.addSink(); |
2631 | return; |
2632 | } |
2633 | |
2634 | std::optional<MallocChecker::CheckKind> CheckKind = getCheckIfTracked(Family); |
2635 | if (!CheckKind) |
2636 | return; |
2637 | |
2638 | ExplodedNode *N = C.generateErrorNode(); |
2639 | if (!N) |
2640 | return; |
2641 | |
2642 | if (!BT_OffsetFree[*CheckKind]) |
2643 | BT_OffsetFree[*CheckKind].reset(p: new BugType( |
2644 | CheckNames[*CheckKind], "Offset free" , categories::MemoryError)); |
2645 | |
2646 | SmallString<100> buf; |
2647 | llvm::raw_svector_ostream os(buf); |
2648 | SmallString<20> AllocNameBuf; |
2649 | llvm::raw_svector_ostream AllocNameOs(AllocNameBuf); |
2650 | |
2651 | const MemRegion *MR = ArgVal.getAsRegion(); |
2652 | assert(MR && "Only MemRegion based symbols can have offset free errors" ); |
2653 | |
2654 | RegionOffset Offset = MR->getAsOffset(); |
2655 | assert((Offset.isValid() && |
2656 | !Offset.hasSymbolicOffset() && |
2657 | Offset.getOffset() != 0) && |
2658 | "Only symbols with a valid offset can have offset free errors" ); |
2659 | |
2660 | int offsetBytes = Offset.getOffset() / C.getASTContext().getCharWidth(); |
2661 | |
2662 | os << "Argument to " ; |
2663 | if (!printMemFnName(os, C, E: DeallocExpr)) |
2664 | os << "deallocator" ; |
2665 | os << " is offset by " |
2666 | << offsetBytes |
2667 | << " " |
2668 | << ((abs(x: offsetBytes) > 1) ? "bytes" : "byte" ) |
2669 | << " from the start of " ; |
2670 | if (AllocExpr && printMemFnName(os&: AllocNameOs, C, E: AllocExpr)) |
2671 | os << "memory allocated by " << AllocNameOs.str(); |
2672 | else |
2673 | os << "allocated memory" ; |
2674 | |
2675 | auto R = std::make_unique<PathSensitiveBugReport>(args&: *BT_OffsetFree[*CheckKind], |
2676 | args: os.str(), args&: N); |
2677 | R->markInteresting(R: MR->getBaseRegion()); |
2678 | R->addRange(R: Range); |
2679 | C.emitReport(R: std::move(R)); |
2680 | } |
2681 | |
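// Report a use of memory after it has been freed, including uses of a
// container's inner pointer after the container was re/deallocated.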
2682 | void MallocChecker::HandleUseAfterFree(CheckerContext &C, SourceRange Range, |
2683 | SymbolRef Sym) const { |
2684 | |
2685 | if (!ChecksEnabled[CK_MallocChecker] && !ChecksEnabled[CK_NewDeleteChecker] && |
2686 | !ChecksEnabled[CK_InnerPointerChecker]) { |
2687 | C.addSink(); |
2688 | return; |
2689 | } |
2690 | |
2691 | std::optional<MallocChecker::CheckKind> CheckKind = getCheckIfTracked(C, Sym); |
2692 | if (!CheckKind) |
2693 | return; |
2694 | |
2695 | if (ExplodedNode *N = C.generateErrorNode()) { |
2696 | if (!BT_UseFree[*CheckKind]) |
2697 | BT_UseFree[*CheckKind].reset(p: new BugType( |
2698 | CheckNames[*CheckKind], "Use-after-free" , categories::MemoryError)); |
2699 | |
2700 | AllocationFamily AF = |
2701 | C.getState()->get<RegionState>(key: Sym)->getAllocationFamily(); |
2702 | |
2703 | auto R = std::make_unique<PathSensitiveBugReport>( |
2704 | args&: *BT_UseFree[*CheckKind], |
2705 | args: AF.Kind == AF_InnerBuffer |
2706 | ? "Inner pointer of container used after re/deallocation" |
2707 | : "Use of memory after it is freed" , |
2708 | args&: N); |
2709 | |
2710 | R->markInteresting(sym: Sym); |
2711 | R->addRange(R: Range); |
2712 | R->addVisitor<MallocBugVisitor>(ConstructorArgs&: Sym); |
2713 | |
2714 | if (AF.Kind == AF_InnerBuffer) |
2715 | R->addVisitor(visitor: allocation_state::getInnerPointerBRVisitor(Sym)); |
2716 | |
2717 | C.emitReport(R: std::move(R)); |
2718 | } |
2719 | } |
2720 | |
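// Report an attempt to free memory that was already released, or memory
// that was never owned by the caller.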
2721 | void MallocChecker::HandleDoubleFree(CheckerContext &C, SourceRange Range, |
2722 | bool Released, SymbolRef Sym, |
2723 | SymbolRef PrevSym) const { |
2724 | |
2725 | if (!ChecksEnabled[CK_MallocChecker] && !ChecksEnabled[CK_NewDeleteChecker]) { |
2726 | C.addSink(); |
2727 | return; |
2728 | } |
2729 | |
2730 | std::optional<MallocChecker::CheckKind> CheckKind = getCheckIfTracked(C, Sym); |
2731 | if (!CheckKind) |
2732 | return; |
2733 | |
2734 | if (ExplodedNode *N = C.generateErrorNode()) { |
2735 | if (!BT_DoubleFree[*CheckKind]) |
2736 | BT_DoubleFree[*CheckKind].reset(p: new BugType( |
2737 | CheckNames[*CheckKind], "Double free" , categories::MemoryError)); |
2738 | |
2739 | auto R = std::make_unique<PathSensitiveBugReport>( |
2740 | args&: *BT_DoubleFree[*CheckKind], |
2741 | args: (Released ? "Attempt to free released memory" |
2742 | : "Attempt to free non-owned memory" ), |
2743 | args&: N); |
2744 | R->addRange(R: Range); |
2745 | R->markInteresting(sym: Sym); |
2746 | if (PrevSym) |
2747 | R->markInteresting(sym: PrevSym); |
2748 | R->addVisitor<MallocBugVisitor>(ConstructorArgs&: Sym); |
2749 | C.emitReport(R: std::move(R)); |
2750 | } |
2751 | } |
2752 | |
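// Report a 'delete' of memory that has already been released.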
2753 | void MallocChecker::HandleDoubleDelete(CheckerContext &C, SymbolRef Sym) const { |
2754 | |
2755 | if (!ChecksEnabled[CK_NewDeleteChecker]) { |
2756 | C.addSink(); |
2757 | return; |
2758 | } |
2759 | |
2760 | std::optional<MallocChecker::CheckKind> CheckKind = getCheckIfTracked(C, Sym); |
2761 | if (!CheckKind) |
2762 | return; |
2763 | |
2764 | if (ExplodedNode *N = C.generateErrorNode()) { |
2765 | if (!BT_DoubleDelete) |
2766 | BT_DoubleDelete.reset(p: new BugType(CheckNames[CK_NewDeleteChecker], |
2767 | "Double delete" , |
2768 | categories::MemoryError)); |
2769 | |
2770 | auto R = std::make_unique<PathSensitiveBugReport>( |
2771 | args&: *BT_DoubleDelete, args: "Attempt to delete released memory" , args&: N); |
2772 | |
2773 | R->markInteresting(sym: Sym); |
2774 | R->addVisitor<MallocBugVisitor>(ConstructorArgs&: Sym); |
2775 | C.emitReport(R: std::move(R)); |
2776 | } |
2777 | } |
2778 | |
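// Report a use of memory that was allocated with a size of zero.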
2779 | void MallocChecker::HandleUseZeroAlloc(CheckerContext &C, SourceRange Range, |
2780 | SymbolRef Sym) const { |
2781 | |
2782 | if (!ChecksEnabled[CK_MallocChecker] && !ChecksEnabled[CK_NewDeleteChecker]) { |
2783 | C.addSink(); |
2784 | return; |
2785 | } |
2786 | |
2787 | std::optional<MallocChecker::CheckKind> CheckKind = getCheckIfTracked(C, Sym); |
2788 | |
2789 | if (!CheckKind) |
2790 | return; |
2791 | |
2792 | if (ExplodedNode *N = C.generateErrorNode()) { |
2793 | if (!BT_UseZerroAllocated[*CheckKind]) |
2794 | BT_UseZerroAllocated[*CheckKind].reset( |
2795 | p: new BugType(CheckNames[*CheckKind], "Use of zero allocated" , |
2796 | categories::MemoryError)); |
2797 | |
2798 | auto R = std::make_unique<PathSensitiveBugReport>( |
2799 | args&: *BT_UseZerroAllocated[*CheckKind], |
2800 | args: "Use of memory allocated with size zero" , args&: N); |
2801 | |
2802 | R->addRange(R: Range); |
2803 | if (Sym) { |
2804 | R->markInteresting(sym: Sym); |
2805 | R->addVisitor<MallocBugVisitor>(ConstructorArgs&: Sym); |
2806 | } |
2807 | C.emitReport(R: std::move(R)); |
2808 | } |
2809 | } |
2810 | |
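// Report passing a function pointer to a deallocation function.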
2811 | void MallocChecker::HandleFunctionPtrFree(CheckerContext &C, SVal ArgVal, |
2812 | SourceRange Range, |
2813 | const Expr *FreeExpr, |
2814 | AllocationFamily Family) const { |
2815 | if (!ChecksEnabled[CK_MallocChecker]) { |
2816 | C.addSink(); |
2817 | return; |
2818 | } |
2819 | |
2820 | std::optional<MallocChecker::CheckKind> CheckKind = getCheckIfTracked(Family); |
2821 | if (!CheckKind) |
2822 | return; |
2823 | |
2824 | if (ExplodedNode *N = C.generateErrorNode()) { |
2825 | if (!BT_BadFree[*CheckKind]) |
2826 | BT_BadFree[*CheckKind].reset(p: new BugType( |
2827 | CheckNames[*CheckKind], "Bad free" , categories::MemoryError)); |
2828 | |
2829 | SmallString<100> Buf; |
2830 | llvm::raw_svector_ostream Os(Buf); |
2831 | |
2832 | const MemRegion *MR = ArgVal.getAsRegion(); |
2833 | while (const ElementRegion *ER = dyn_cast_or_null<ElementRegion>(Val: MR)) |
2834 | MR = ER->getSuperRegion(); |
2835 | |
2836 | Os << "Argument to " ; |
2837 | if (!printMemFnName(os&: Os, C, E: FreeExpr)) |
2838 | Os << "deallocator" ; |
2839 | |
2840 | Os << " is a function pointer" ; |
2841 | |
2842 | auto R = std::make_unique<PathSensitiveBugReport>(args&: *BT_BadFree[*CheckKind], |
2843 | args: Os.str(), args&: N); |
2844 | R->markInteresting(R: MR); |
2845 | R->addRange(R: Range); |
2846 | C.emitReport(R: std::move(R)); |
2847 | } |
2848 | } |
2849 | |
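// Model realloc()-like calls: a null pointer behaves like malloc(size), a
// zero size frees the pointer, and otherwise the old block is freed and a
// fresh allocation is returned. A ReallocPair is recorded so the old
// symbol's state can be restored if the reallocation is later assumed to
// have failed.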
2850 | ProgramStateRef |
2851 | MallocChecker::ReallocMemAux(CheckerContext &C, const CallEvent &Call, |
2852 | bool ShouldFreeOnFail, ProgramStateRef State, |
2853 | AllocationFamily Family, bool SuffixWithN) const { |
2854 | if (!State) |
2855 | return nullptr; |
2856 | |
2857 | const CallExpr *CE = cast<CallExpr>(Val: Call.getOriginExpr()); |
2858 | |
2859 | if ((SuffixWithN && CE->getNumArgs() < 3) || CE->getNumArgs() < 2) |
2860 | return nullptr; |
2861 | |
2862 | const Expr *arg0Expr = CE->getArg(Arg: 0); |
2863 | SVal Arg0Val = C.getSVal(S: arg0Expr); |
2864 | if (!isa<DefinedOrUnknownSVal>(Val: Arg0Val)) |
2865 | return nullptr; |
2866 | DefinedOrUnknownSVal arg0Val = Arg0Val.castAs<DefinedOrUnknownSVal>(); |
2867 | |
2868 | SValBuilder &svalBuilder = C.getSValBuilder(); |
2869 | |
2870 | DefinedOrUnknownSVal PtrEQ = svalBuilder.evalEQ( |
2871 | state: State, lhs: arg0Val, rhs: svalBuilder.makeNullWithType(type: arg0Expr->getType())); |
2872 | |
2873 | // Get the size argument. |
2874 | const Expr *Arg1 = CE->getArg(Arg: 1); |
2875 | |
2876 | // Get the value of the size argument. |
2877 | SVal TotalSize = C.getSVal(S: Arg1); |
2878 | if (SuffixWithN) |
2879 | TotalSize = evalMulForBufferSize(C, Blocks: Arg1, BlockBytes: CE->getArg(Arg: 2)); |
2880 | if (!isa<DefinedOrUnknownSVal>(Val: TotalSize)) |
2881 | return nullptr; |
2882 | |
2883 | // Compare the size argument to 0. |
2884 | DefinedOrUnknownSVal SizeZero = |
2885 | svalBuilder.evalEQ(state: State, lhs: TotalSize.castAs<DefinedOrUnknownSVal>(), |
2886 | rhs: svalBuilder.makeIntValWithWidth( |
2887 | ptrType: svalBuilder.getContext().getSizeType(), integer: 0)); |
2888 | |
2889 | ProgramStateRef StatePtrIsNull, StatePtrNotNull; |
2890 | std::tie(args&: StatePtrIsNull, args&: StatePtrNotNull) = State->assume(Cond: PtrEQ); |
2891 | ProgramStateRef StateSizeIsZero, StateSizeNotZero; |
2892 | std::tie(args&: StateSizeIsZero, args&: StateSizeNotZero) = State->assume(Cond: SizeZero); |
2893 | // We only assume exceptional states if they are definitely true; if the |
2894 | // state is under-constrained, assume regular realloc behavior. |
2895 | bool PtrIsNull = StatePtrIsNull && !StatePtrNotNull;
2896 | bool SizeIsZero = StateSizeIsZero && !StateSizeNotZero;
2897 | 
2898 | // If the ptr is NULL and the size is not 0, the call is equivalent to
2899 | // malloc(size).
2900 | if (PtrIsNull && !SizeIsZero) {
2901 | ProgramStateRef stateMalloc = MallocMemAux(
2902 | C, Call, Size: TotalSize, Init: UndefinedVal(), State: StatePtrIsNull, Family);
2903 | return stateMalloc;
2904 | }
2905 | 
2906 | // Process as an allocation of 0 bytes.
2907 | if (PtrIsNull && SizeIsZero)
2908 | return State;
2909 | 
2910 | assert(!PtrIsNull);
2911 | |
2912 | bool IsKnownToBeAllocated = false; |
2913 | |
2914 | // If the size is 0, free the memory. |
2915 | if (SizeIsZero) |
2916 | // The semantics of the return value are: |
2917 | // If size was equal to 0, either NULL or a pointer suitable to be passed |
2918 | // to free() is returned. We just free the input pointer and do not add |
2919 | // any constraints on the output pointer.
2920 | if (ProgramStateRef stateFree = FreeMemAux( |
2921 | C, Call, State: StateSizeIsZero, Num: 0, Hold: false, IsKnownToBeAllocated, Family)) |
2922 | return stateFree; |
2923 | |
2924 | // Default behavior. |
2925 | if (ProgramStateRef stateFree = |
2926 | FreeMemAux(C, Call, State, Num: 0, Hold: false, IsKnownToBeAllocated, Family)) { |
2927 | |
2928 | ProgramStateRef stateRealloc = |
2929 | MallocMemAux(C, Call, Size: TotalSize, Init: UnknownVal(), State: stateFree, Family); |
2930 | if (!stateRealloc) |
2931 | return nullptr; |
2932 | |
2933 | OwnershipAfterReallocKind Kind = OAR_ToBeFreedAfterFailure; |
2934 | if (ShouldFreeOnFail) |
2935 | Kind = OAR_FreeOnFailure; |
2936 | else if (!IsKnownToBeAllocated) |
2937 | Kind = OAR_DoNotTrackAfterFailure; |
2938 | |
2939 | // Get the from and to pointer symbols as in toPtr = realloc(fromPtr, size). |
2940 | SymbolRef FromPtr = arg0Val.getLocSymbolInBase(); |
2941 | SVal RetVal = stateRealloc->getSVal(Ex: CE, LCtx: C.getLocationContext()); |
2942 | SymbolRef ToPtr = RetVal.getAsSymbol(); |
2943 | assert(FromPtr && ToPtr && |
2944 | "By this point, FreeMemAux and MallocMemAux should have checked " |
2945 | "whether the argument or the return value is symbolic!" ); |
2946 | |
2947 | // Record the info about the reallocated symbol so that we can properly
2948 | // process a failed reallocation.
2949 | stateRealloc = stateRealloc->set<ReallocPairs>(K: ToPtr, |
2950 | E: ReallocPair(FromPtr, Kind)); |
2951 | // The reallocated symbol should stay alive for as long as the new symbol. |
2952 | C.getSymbolManager().addSymbolDependency(Primary: ToPtr, Dependent: FromPtr); |
2953 | return stateRealloc; |
2954 | } |
2955 | return nullptr; |
2956 | } |
2957 | |
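// Model calloc() as a zero-initialized allocation of nmemb * size bytes.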
2958 | ProgramStateRef MallocChecker::CallocMem(CheckerContext &C, |
2959 | const CallEvent &Call, |
2960 | ProgramStateRef State) const { |
2961 | if (!State) |
2962 | return nullptr; |
2963 | |
2964 | if (Call.getNumArgs() < 2) |
2965 | return nullptr; |
2966 | |
2967 | SValBuilder &svalBuilder = C.getSValBuilder(); |
2968 | SVal zeroVal = svalBuilder.makeZeroVal(type: svalBuilder.getContext().CharTy); |
2969 | SVal TotalSize = |
2970 | evalMulForBufferSize(C, Blocks: Call.getArgExpr(Index: 0), BlockBytes: Call.getArgExpr(Index: 1)); |
2971 | |
2972 | return MallocMemAux(C, Call, Size: TotalSize, Init: zeroVal, State, |
2973 | Family: AllocationFamily(AF_Malloc)); |
2974 | } |
2975 | |
2976 | MallocChecker::LeakInfo MallocChecker::getAllocationSite(const ExplodedNode *N, |
2977 | SymbolRef Sym, |
2978 | CheckerContext &C) { |
2979 | const LocationContext *LeakContext = N->getLocationContext(); |
2980 | // Walk the ExplodedGraph backwards and find the first node that referred to |
2981 | // the tracked symbol. |
2982 | const ExplodedNode *AllocNode = N; |
2983 | const MemRegion *ReferenceRegion = nullptr; |
2984 | |
2985 | while (N) { |
2986 | ProgramStateRef State = N->getState(); |
2987 | if (!State->get<RegionState>(key: Sym)) |
2988 | break; |
2989 | |
2990 | // Find the most recent expression bound to the symbol in the current |
2991 | // context. |
2992 | if (!ReferenceRegion) { |
2993 | if (const MemRegion *MR = C.getLocationRegionIfPostStore(N)) { |
2994 | SVal Val = State->getSVal(R: MR); |
2995 | if (Val.getAsLocSymbol() == Sym) { |
2996 | const VarRegion *VR = MR->getBaseRegion()->getAs<VarRegion>(); |
2997 | // Do not show local variables belonging to a function other than |
2998 | // where the error is reported. |
2999 | if (!VR || (VR->getStackFrame() == LeakContext->getStackFrame())) |
3000 | ReferenceRegion = MR; |
3001 | } |
3002 | } |
3003 | } |
3004 | |
3005 | // The allocation node is the last node in the current or parent context in
3006 | // which the symbol was tracked.
3007 | const LocationContext *NContext = N->getLocationContext(); |
3008 | if (NContext == LeakContext || |
3009 | NContext->isParentOf(LC: LeakContext)) |
3010 | AllocNode = N; |
3011 | N = N->pred_empty() ? nullptr : *(N->pred_begin()); |
3012 | } |
3013 | |
3014 | return LeakInfo(AllocNode, ReferenceRegion); |
3015 | } |
3016 | |
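// Emit a "Potential (memory) leak" report, uniqued by the allocation site
// rather than by the point where the leak is detected.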
3017 | void MallocChecker::HandleLeak(SymbolRef Sym, ExplodedNode *N, |
3018 | CheckerContext &C) const { |
3019 | |
3020 | if (!ChecksEnabled[CK_MallocChecker] && |
3021 | !ChecksEnabled[CK_NewDeleteLeaksChecker]) |
3022 | return; |
3023 | |
3024 | const RefState *RS = C.getState()->get<RegionState>(key: Sym); |
3025 | assert(RS && "cannot leak an untracked symbol" ); |
3026 | AllocationFamily Family = RS->getAllocationFamily(); |
3027 | |
3028 | if (Family.Kind == AF_Alloca) |
3029 | return; |
3030 | |
3031 | std::optional<MallocChecker::CheckKind> CheckKind = |
3032 | getCheckIfTracked(Family, IsALeakCheck: true); |
3033 | |
3034 | if (!CheckKind) |
3035 | return; |
3036 | |
3037 | assert(N); |
3038 | if (!BT_Leak[*CheckKind]) { |
3039 | // Leaks should not be reported if they are post-dominated by a sink: |
3040 | // (1) Sinks are higher importance bugs. |
3041 | // (2) NoReturnFunctionChecker uses sink nodes to represent paths ending |
3042 | // with __noreturn functions such as assert() or exit(). We choose not |
3043 | // to report leaks on such paths. |
3044 | BT_Leak[*CheckKind].reset(p: new BugType(CheckNames[*CheckKind], "Memory leak" , |
3045 | categories::MemoryError, |
3046 | /*SuppressOnSink=*/true)); |
3047 | } |
3048 | |
3049 | // Most bug reports are cached at the location where they occurred. |
3050 | // With leaks, we want to unique them by the location where they were |
3051 | // allocated, and only report a single path. |
3052 | PathDiagnosticLocation LocUsedForUniqueing; |
3053 | const ExplodedNode *AllocNode = nullptr; |
3054 | const MemRegion *Region = nullptr; |
3055 | std::tie(args&: AllocNode, args&: Region) = getAllocationSite(N, Sym, C); |
3056 | |
3057 | const Stmt *AllocationStmt = AllocNode->getStmtForDiagnostics(); |
3058 | if (AllocationStmt) |
3059 | LocUsedForUniqueing = PathDiagnosticLocation::createBegin(S: AllocationStmt, |
3060 | SM: C.getSourceManager(), |
3061 | LAC: AllocNode->getLocationContext()); |
3062 | |
3063 | SmallString<200> buf; |
3064 | llvm::raw_svector_ostream os(buf); |
3065 | if (Region && Region->canPrintPretty()) { |
3066 | os << "Potential leak of memory pointed to by " ; |
3067 | Region->printPretty(os); |
3068 | } else { |
3069 | os << "Potential memory leak" ; |
3070 | } |
3071 | |
3072 | auto R = std::make_unique<PathSensitiveBugReport>( |
3073 | args&: *BT_Leak[*CheckKind], args: os.str(), args&: N, args&: LocUsedForUniqueing, |
3074 | args: AllocNode->getLocationContext()->getDecl()); |
3075 | R->markInteresting(sym: Sym); |
3076 | R->addVisitor<MallocBugVisitor>(ConstructorArgs&: Sym, ConstructorArgs: true); |
3077 | if (ShouldRegisterNoOwnershipChangeVisitor) |
3078 | R->addVisitor<NoMemOwnershipChangeVisitor>(ConstructorArgs&: Sym, ConstructorArgs: this); |
3079 | C.emitReport(R: std::move(R)); |
3080 | } |
3081 | |
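// When tracked symbols die, report leaks for those that are still
// allocated and drop dead entries from the checker's maps.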
3082 | void MallocChecker::checkDeadSymbols(SymbolReaper &SymReaper, |
3083 | CheckerContext &C) const |
3084 | { |
3085 | ProgramStateRef state = C.getState(); |
3086 | RegionStateTy OldRS = state->get<RegionState>(); |
3087 | RegionStateTy::Factory &F = state->get_context<RegionState>(); |
3088 | |
3089 | RegionStateTy RS = OldRS; |
3090 | SmallVector<SymbolRef, 2> Errors; |
3091 | for (auto [Sym, State] : RS) { |
3092 | if (SymReaper.isDead(sym: Sym)) { |
3093 | if (State.isAllocated() || State.isAllocatedOfSizeZero()) |
3094 | Errors.push_back(Elt: Sym); |
3095 | // Remove the dead symbol from the map. |
3096 | RS = F.remove(Old: RS, K: Sym); |
3097 | } |
3098 | } |
3099 | |
3100 | if (RS == OldRS) { |
3101 | // We shouldn't have touched other maps yet. |
3102 | assert(state->get<ReallocPairs>() == |
3103 | C.getState()->get<ReallocPairs>()); |
3104 | assert(state->get<FreeReturnValue>() == |
3105 | C.getState()->get<FreeReturnValue>()); |
3106 | return; |
3107 | } |
3108 | |
3109 | // Cleanup the Realloc Pairs Map. |
3110 | ReallocPairsTy RP = state->get<ReallocPairs>(); |
3111 | for (auto [Sym, ReallocPair] : RP) { |
3112 | if (SymReaper.isDead(sym: Sym) || SymReaper.isDead(sym: ReallocPair.ReallocatedSym)) { |
3113 | state = state->remove<ReallocPairs>(K: Sym); |
3114 | } |
3115 | } |
3116 | |
3117 | // Cleanup the FreeReturnValue Map. |
3118 | FreeReturnValueTy FR = state->get<FreeReturnValue>(); |
3119 | for (auto [Sym, RetSym] : FR) { |
3120 | if (SymReaper.isDead(sym: Sym) || SymReaper.isDead(sym: RetSym)) { |
3121 | state = state->remove<FreeReturnValue>(K: Sym); |
3122 | } |
3123 | } |
3124 | |
3125 | // Generate leak node. |
3126 | ExplodedNode *N = C.getPredecessor(); |
3127 | if (!Errors.empty()) { |
3128 | N = C.generateNonFatalErrorNode(State: C.getState()); |
3129 | if (N) { |
3130 | for (SymbolRef Sym : Errors) { |
3131 | HandleLeak(Sym, N, C); |
3132 | } |
3133 | } |
3134 | } |
3135 | |
3136 | C.addTransition(State: state->set<RegionState>(RS), Pred: N); |
3137 | } |
3138 | |
3139 | void MallocChecker::checkPostCall(const CallEvent &Call, |
3140 | CheckerContext &C) const { |
3141 | if (const auto *PostFN = PostFnMap.lookup(Call)) { |
3142 | (*PostFN)(this, C.getState(), Call, C); |
3143 | return; |
3144 | } |
3145 | } |
3146 | |
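// Before a call: model operator delete and destructor invocations, run any
// registered pre-call handlers, and check the arguments (and 'this') for
// use-after-free.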
3147 | void MallocChecker::checkPreCall(const CallEvent &Call, |
3148 | CheckerContext &C) const { |
3149 | |
3150 | if (const auto *DC = dyn_cast<CXXDeallocatorCall>(Val: &Call)) { |
3151 | const CXXDeleteExpr *DE = DC->getOriginExpr(); |
3152 | |
3153 | if (!ChecksEnabled[CK_NewDeleteChecker]) |
3154 | if (SymbolRef Sym = C.getSVal(S: DE->getArgument()).getAsSymbol()) |
3155 | checkUseAfterFree(Sym, C, S: DE->getArgument()); |
3156 | |
3157 | if (!isStandardNewDelete(FD: DC->getDecl())) |
3158 | return; |
3159 | |
3160 | ProgramStateRef State = C.getState(); |
3161 | bool IsKnownToBeAllocated; |
3162 | State = FreeMemAux( |
3163 | C, ArgExpr: DE->getArgument(), Call, State, |
3164 | /*Hold*/ false, IsKnownToBeAllocated, |
3165 | Family: AllocationFamily(DE->isArrayForm() ? AF_CXXNewArray : AF_CXXNew)); |
3166 | |
3167 | C.addTransition(State); |
3168 | return; |
3169 | } |
3170 | |
3171 | if (const auto *DC = dyn_cast<CXXDestructorCall>(Val: &Call)) { |
3172 | SymbolRef Sym = DC->getCXXThisVal().getAsSymbol(); |
3173 | if (!Sym || checkDoubleDelete(Sym, C)) |
3174 | return; |
3175 | } |
3176 | |
3177 | // We need to handle getline pre-conditions here before the pointed-to region
3178 | // gets invalidated by StreamChecker.
3179 | if (const auto *PreFN = PreFnMap.lookup(Call)) { |
3180 | (*PreFN)(this, C.getState(), Call, C); |
3181 | return; |
3182 | } |
3183 | |
3184 | // We will check for double free in the post visit. |
3185 | if (const AnyFunctionCall *FC = dyn_cast<AnyFunctionCall>(Val: &Call)) { |
3186 | const FunctionDecl *FD = FC->getDecl(); |
3187 | if (!FD) |
3188 | return; |
3189 | |
3190 | if (ChecksEnabled[CK_MallocChecker] && isFreeingCall(Call)) |
3191 | return; |
3192 | } |
3193 | |
3194 | // Check if the callee of a method is deleted. |
3195 | if (const CXXInstanceCall *CC = dyn_cast<CXXInstanceCall>(Val: &Call)) { |
3196 | SymbolRef Sym = CC->getCXXThisVal().getAsSymbol(); |
3197 | if (!Sym || checkUseAfterFree(Sym, C, S: CC->getCXXThisExpr())) |
3198 | return; |
3199 | } |
3200 | |
3201 | // Check arguments for being used after free. |
3202 | for (unsigned I = 0, E = Call.getNumArgs(); I != E; ++I) { |
3203 | SVal ArgSVal = Call.getArgSVal(Index: I); |
3204 | if (isa<Loc>(Val: ArgSVal)) { |
3205 | SymbolRef Sym = ArgSVal.getAsSymbol(); |
3206 | if (!Sym) |
3207 | continue; |
3208 | if (checkUseAfterFree(Sym, C, S: Call.getArgExpr(Index: I))) |
3209 | return; |
3210 | } |
3211 | } |
3212 | } |
3213 | |
3214 | void MallocChecker::checkPreStmt(const ReturnStmt *S, |
3215 | CheckerContext &C) const { |
3216 | checkEscapeOnReturn(S, C); |
3217 | } |
3218 | |
3219 | // In the CFG, automatic destructors come after the return statement. |
3220 | // This callback checks for returning memory that is freed by automatic |
3221 | // destructors, as those cannot be reached in checkPreStmt(). |
3222 | void MallocChecker::checkEndFunction(const ReturnStmt *S, |
3223 | CheckerContext &C) const { |
3224 | checkEscapeOnReturn(S, C); |
3225 | } |
3226 | |
3227 | void MallocChecker::checkEscapeOnReturn(const ReturnStmt *S, |
3228 | CheckerContext &C) const { |
3229 | if (!S) |
3230 | return; |
3231 | |
3232 | const Expr *E = S->getRetValue(); |
3233 | if (!E) |
3234 | return; |
3235 | |
3236 | // Check if we are returning a symbol. |
3237 | ProgramStateRef State = C.getState(); |
3238 | SVal RetVal = C.getSVal(S: E); |
3239 | SymbolRef Sym = RetVal.getAsSymbol(); |
3240 | if (!Sym) |
3241 | // If we are returning a field of the allocated struct or an array element, |
3242 | // the callee could still free the memory. |
3243 | // TODO: This logic should be a part of generic symbol escape callback. |
3244 | if (const MemRegion *MR = RetVal.getAsRegion()) |
3245 | if (isa<FieldRegion, ElementRegion>(Val: MR)) |
3246 | if (const SymbolicRegion *BMR = |
3247 | dyn_cast<SymbolicRegion>(Val: MR->getBaseRegion())) |
3248 | Sym = BMR->getSymbol(); |
3249 | |
3250 | // Check if we are returning freed memory. |
3251 | if (Sym) |
3252 | checkUseAfterFree(Sym, C, S: E); |
3253 | } |
3254 | |
3255 | // TODO: Blocks should be either inlined or should call invalidate regions |
3256 | // upon invocation. After that's in place, special casing here will not be |
3257 | // needed. |
3258 | void MallocChecker::checkPostStmt(const BlockExpr *BE, |
3259 | CheckerContext &C) const { |
3260 | |
3261 | // Scan the BlockDeclRefExprs for any object the retain count checker
3262 | // may be tracking. |
3263 | if (!BE->getBlockDecl()->hasCaptures()) |
3264 | return; |
3265 | |
3266 | ProgramStateRef state = C.getState(); |
3267 | const BlockDataRegion *R = |
3268 | cast<BlockDataRegion>(Val: C.getSVal(S: BE).getAsRegion()); |
3269 | |
3270 | auto ReferencedVars = R->referenced_vars(); |
3271 | if (ReferencedVars.empty()) |
3272 | return; |
3273 | |
3274 | SmallVector<const MemRegion*, 10> Regions; |
3275 | const LocationContext *LC = C.getLocationContext(); |
3276 | MemRegionManager &MemMgr = C.getSValBuilder().getRegionManager(); |
3277 | |
3278 | for (const auto &Var : ReferencedVars) { |
3279 | const VarRegion *VR = Var.getCapturedRegion(); |
3280 | if (VR->getSuperRegion() == R) { |
3281 | VR = MemMgr.getVarRegion(VD: VR->getDecl(), LC); |
3282 | } |
3283 | Regions.push_back(Elt: VR); |
3284 | } |
3285 | |
3286 | state = |
3287 | state->scanReachableSymbols<StopTrackingCallback>(Reachable: Regions).getState(); |
3288 | C.addTransition(State: state); |
3289 | } |
3290 | |
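// Return true if the symbol is tracked and currently marked as released.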
3291 | static bool isReleased(SymbolRef Sym, CheckerContext &C) { |
3292 | assert(Sym); |
3293 | const RefState *RS = C.getState()->get<RegionState>(key: Sym); |
3294 | return (RS && RS->isReleased()); |
3295 | } |
3296 | |
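// Heuristic: if the surrounding function's declaration text mentions
// "__isl_" (the Integer Set Library's retain-count based API), mark the
// tracked arguments as escaped instead of modeling the deallocation.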
3297 | bool MallocChecker::suppressDeallocationsInSuspiciousContexts( |
3298 | const CallEvent &Call, CheckerContext &C) const { |
3299 | if (Call.getNumArgs() == 0) |
3300 | return false; |
3301 | |
3302 | StringRef FunctionStr = "" ; |
3303 | if (const auto *FD = dyn_cast<FunctionDecl>(Val: C.getStackFrame()->getDecl())) |
3304 | if (const Stmt *Body = FD->getBody()) |
3305 | if (Body->getBeginLoc().isValid()) |
3306 | FunctionStr = |
3307 | Lexer::getSourceText(Range: CharSourceRange::getTokenRange( |
3308 | R: {FD->getBeginLoc(), Body->getBeginLoc()}), |
3309 | SM: C.getSourceManager(), LangOpts: C.getLangOpts()); |
3310 | |
3311 | // We do not model the Integer Set Library's retain-count based allocation. |
3312 | if (!FunctionStr.contains(Other: "__isl_" )) |
3313 | return false; |
3314 | |
3315 | ProgramStateRef State = C.getState(); |
3316 | |
3317 | for (const Expr *Arg : cast<CallExpr>(Val: Call.getOriginExpr())->arguments()) |
3318 | if (SymbolRef Sym = C.getSVal(S: Arg).getAsSymbol()) |
3319 | if (const RefState *RS = State->get<RegionState>(key: Sym)) |
3320 | State = State->set<RegionState>(K: Sym, E: RefState::getEscaped(RS)); |
3321 | |
3322 | C.addTransition(State); |
3323 | return true; |
3324 | } |
3325 | |
3326 | bool MallocChecker::checkUseAfterFree(SymbolRef Sym, CheckerContext &C, |
3327 | const Stmt *S) const { |
3328 | |
3329 | if (isReleased(Sym, C)) { |
3330 | HandleUseAfterFree(C, Range: S->getSourceRange(), Sym); |
3331 | return true; |
3332 | } |
3333 | |
3334 | return false; |
3335 | } |
3336 | |
3337 | void MallocChecker::checkUseZeroAllocated(SymbolRef Sym, CheckerContext &C, |
3338 | const Stmt *S) const { |
3339 | assert(Sym); |
3340 | |
3341 | if (const RefState *RS = C.getState()->get<RegionState>(key: Sym)) { |
3342 | if (RS->isAllocatedOfSizeZero()) |
3343 | HandleUseZeroAlloc(C, Range: RS->getStmt()->getSourceRange(), Sym); |
3344 | } |
3345 | else if (C.getState()->contains<ReallocSizeZeroSymbols>(key: Sym)) { |
3346 | HandleUseZeroAlloc(C, Range: S->getSourceRange(), Sym); |
3347 | } |
3348 | } |
3349 | |
3350 | bool MallocChecker::checkDoubleDelete(SymbolRef Sym, CheckerContext &C) const { |
3351 | |
3352 | if (isReleased(Sym, C)) { |
3353 | HandleDoubleDelete(C, Sym); |
3354 | return true; |
3355 | } |
3356 | return false; |
3357 | } |
3358 | |
3359 | // Check if the location is a freed symbolic region. |
3360 | void MallocChecker::checkLocation(SVal l, bool isLoad, const Stmt *S, |
3361 | CheckerContext &C) const { |
3362 | SymbolRef Sym = l.getLocSymbolInBase(); |
3363 | if (Sym) { |
3364 | checkUseAfterFree(Sym, C, S); |
3365 | checkUseZeroAllocated(Sym, C, S); |
3366 | } |
3367 | } |
3368 | |
3369 | // If a symbolic region is assumed to be NULL (or another constant), stop
3370 | // tracking it - assuming that allocation failed on this path.
3371 | ProgramStateRef MallocChecker::evalAssume(ProgramStateRef state, |
3372 | SVal Cond, |
3373 | bool Assumption) const { |
3374 | RegionStateTy RS = state->get<RegionState>(); |
3375 | for (SymbolRef Sym : llvm::make_first_range(c&: RS)) { |
3376 | // If the symbol is assumed to be NULL, remove it from consideration. |
3377 | ConstraintManager &CMgr = state->getConstraintManager(); |
3378 | ConditionTruthVal AllocFailed = CMgr.isNull(State: state, Sym); |
3379 | if (AllocFailed.isConstrainedTrue()) |
3380 | state = state->remove<RegionState>(K: Sym); |
3381 | } |
3382 | |
3383 | // Realloc returns 0 when reallocation fails, which means that we should |
3384 | // restore the state of the pointer being reallocated. |
3385 | ReallocPairsTy RP = state->get<ReallocPairs>(); |
3386 | for (auto [Sym, ReallocPair] : RP) { |
3387 | // If the symbol is assumed to be NULL, remove it from consideration. |
3388 | ConstraintManager &CMgr = state->getConstraintManager(); |
3389 | ConditionTruthVal AllocFailed = CMgr.isNull(State: state, Sym); |
3390 | if (!AllocFailed.isConstrainedTrue()) |
3391 | continue; |
3392 | |
3393 | SymbolRef ReallocSym = ReallocPair.ReallocatedSym; |
3394 | if (const RefState *RS = state->get<RegionState>(key: ReallocSym)) { |
3395 | if (RS->isReleased()) { |
3396 | switch (ReallocPair.Kind) { |
3397 | case OAR_ToBeFreedAfterFailure: |
3398 | state = state->set<RegionState>(K: ReallocSym, |
3399 | E: RefState::getAllocated(family: RS->getAllocationFamily(), s: RS->getStmt())); |
3400 | break; |
3401 | case OAR_DoNotTrackAfterFailure: |
3402 | state = state->remove<RegionState>(K: ReallocSym); |
3403 | break; |
3404 | default: |
3405 | assert(ReallocPair.Kind == OAR_FreeOnFailure); |
3406 | } |
3407 | } |
3408 | } |
3409 | state = state->remove<ReallocPairs>(K: Sym); |
3410 | } |
3411 | |
3412 | return state; |
3413 | } |
3414 | |
3415 | bool MallocChecker::mayFreeAnyEscapedMemoryOrIsModeledExplicitly( |
3416 | const CallEvent *Call, |
3417 | ProgramStateRef State, |
3418 | SymbolRef &EscapingSymbol) const { |
3419 | assert(Call); |
3420 | EscapingSymbol = nullptr; |
3421 | |
3422 | // For now, assume that any C++ or block call can free memory. |
3423 | // TODO: If we want to be more optimistic here, we'll need to make sure that |
3424 | // regions escape to C++ containers. They seem to do that even now, but for |
3425 | // mysterious reasons. |
3426 | if (!isa<SimpleFunctionCall, ObjCMethodCall>(Val: Call)) |
3427 | return true; |
3428 | |
3429 | // Check Objective-C messages by selector name. |
3430 | if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(Val: Call)) { |
3431 | // If it's not a framework call, or if it takes a callback, assume it |
3432 | // can free memory. |
3433 | if (!Call->isInSystemHeader() || Call->argumentsMayEscape()) |
3434 | return true; |
3435 | |
3436 | // If it's a method we know about, handle it explicitly post-call. |
3437 | // This should happen before the "freeWhenDone" check below. |
3438 | if (isKnownDeallocObjCMethodName(Call: *Msg)) |
3439 | return false; |
3440 | |
3441 | // If there's a "freeWhenDone" parameter, but the method isn't one we know |
3442 | // about, we can't be sure that the object will use free() to deallocate the |
3443 | // memory, so we can't model it explicitly. The best we can do is use it to |
3444 | // decide whether the pointer escapes. |
3445 | if (std::optional<bool> FreeWhenDone = getFreeWhenDoneArg(Call: *Msg)) |
3446 | return *FreeWhenDone; |
3447 | |
3448 | // If the first selector piece ends with "NoCopy", and there is no |
3449 | // "freeWhenDone" parameter set to zero, we know ownership is being |
3450 | // transferred. Again, though, we can't be sure that the object will use |
3451 | // free() to deallocate the memory, so we can't model it explicitly. |
3452 | StringRef FirstSlot = Msg->getSelector().getNameForSlot(argIndex: 0); |
3453 | if (FirstSlot.ends_with(Suffix: "NoCopy" )) |
3454 | return true; |
3455 | |
3456 | // If the first selector starts with addPointer, insertPointer, |
3457 | // or replacePointer, assume we are dealing with NSPointerArray or similar. |
3458 | // This is similar to C++ containers (vector); we still might want to check |
3459 | // that the pointers get freed by following the container itself. |
3460 | if (FirstSlot.starts_with(Prefix: "addPointer" ) || |
3461 | FirstSlot.starts_with(Prefix: "insertPointer" ) || |
3462 | FirstSlot.starts_with(Prefix: "replacePointer" ) || |
3463 | FirstSlot == "valueWithPointer" ) { |
3464 | return true; |
3465 | } |
3466 | |
3467 | // We should escape the receiver on a call to 'init'. This matters especially
3468 | // for the receiver, as the corresponding symbol is usually not referenced
3469 | // after the call.
3470 | if (Msg->getMethodFamily() == OMF_init) { |
3471 | EscapingSymbol = Msg->getReceiverSVal().getAsSymbol(); |
3472 | return true; |
3473 | } |
3474 | |
3475 | // Otherwise, assume that the method does not free memory. |
3476 | // Most framework methods do not free memory. |
3477 | return false; |
3478 | } |
3479 | |
3480 | // At this point the only thing left to handle is straight function calls. |
3481 | const FunctionDecl *FD = cast<SimpleFunctionCall>(Val: Call)->getDecl(); |
3482 | if (!FD) |
3483 | return true; |
3484 | |
3485 | // If it's one of the allocation functions we can reason about, we model |
3486 | // its behavior explicitly. |
3487 | if (isMemCall(Call: *Call)) |
3488 | return false; |
3489 | |
3490 | // If it's not a system call, assume it frees memory. |
3491 | if (!Call->isInSystemHeader()) |
3492 | return true; |
3493 | |
3494 | // White list the system functions whose arguments escape. |
3495 | const IdentifierInfo *II = FD->getIdentifier(); |
3496 | if (!II) |
3497 | return true; |
3498 | StringRef FName = II->getName(); |
3499 | |
3500 | // White list the 'XXXNoCopy' CoreFoundation functions.
3501 | // We specifically check these before the more generic escape checks below.
3502 | if (FName.ends_with(Suffix: "NoCopy" )) { |
3503 | // Look for the deallocator argument. We know that the memory ownership |
3504 | // is not transferred only if the deallocator argument is |
3505 | // 'kCFAllocatorNull'. |
3506 | for (unsigned i = 1; i < Call->getNumArgs(); ++i) { |
3507 | const Expr *ArgE = Call->getArgExpr(Index: i)->IgnoreParenCasts(); |
3508 | if (const DeclRefExpr *DE = dyn_cast<DeclRefExpr>(Val: ArgE)) { |
3509 | StringRef DeallocatorName = DE->getFoundDecl()->getName(); |
3510 | if (DeallocatorName == "kCFAllocatorNull" ) |
3511 | return false; |
3512 | } |
3513 | } |
3514 | return true; |
3515 | } |
3516 | |
3517 | // Associating streams with malloced buffers. The pointer can escape if |
3518 | // 'closefn' is specified (and if that function does free memory), |
3519 | // but it will not if closefn is not specified. |
3520 | // Currently, we do not inspect the 'closefn' function (PR12101). |
3521 | if (FName == "funopen" ) |
3522 | if (Call->getNumArgs() >= 5 && Call->getArgSVal(Index: 4).isConstant(I: 0))
3523 | return false; |
3524 | |
3525 | // Do not warn on pointers passed to 'setbuf' when used with std streams, |
3526 | // these leaks might be intentional when setting the buffer for stdio. |
3527 | // http://stackoverflow.com/questions/2671151/who-frees-setvbuf-buffer |
3528 | if (FName == "setbuf" || FName == "setbuffer" ||
3529 | FName == "setlinebuf" || FName == "setvbuf" ) { |
3530 | if (Call->getNumArgs() >= 1) { |
3531 | const Expr *ArgE = Call->getArgExpr(Index: 0)->IgnoreParenCasts(); |
3532 | if (const DeclRefExpr *ArgDRE = dyn_cast<DeclRefExpr>(Val: ArgE)) |
3533 | if (const VarDecl *D = dyn_cast<VarDecl>(Val: ArgDRE->getDecl())) |
3534 | if (D->getCanonicalDecl()->getName().contains(Other: "std" )) |
3535 | return true; |
3536 | } |
3537 | } |
3538 | |
3539 | // A bunch of other functions which either take ownership of a pointer or |
3540 | // wrap the result up in a struct or object, meaning it can be freed later. |
3541 | // (See RetainCountChecker.) Not all the parameters here are invalidated, |
3542 | // but the Malloc checker cannot differentiate between them. The right way |
3543 | // of doing this would be to implement a pointer escapes callback. |
3544 | if (FName == "CGBitmapContextCreate" || |
3545 | FName == "CGBitmapContextCreateWithData" || |
3546 | FName == "CVPixelBufferCreateWithBytes" || |
3547 | FName == "CVPixelBufferCreateWithPlanarBytes" || |
3548 | FName == "OSAtomicEnqueue" ) { |
3549 | return true; |
3550 | } |
3551 | |
3552 | if (FName == "postEvent" && |
3553 | FD->getQualifiedNameAsString() == "QCoreApplication::postEvent" ) { |
3554 | return true; |
3555 | } |
3556 | |
3557 | if (FName == "connectImpl" && |
3558 | FD->getQualifiedNameAsString() == "QObject::connectImpl" ) { |
3559 | return true; |
3560 | } |
3561 | |
3562 | if (FName == "singleShotImpl" && |
3563 | FD->getQualifiedNameAsString() == "QTimer::singleShotImpl" ) { |
3564 | return true; |
3565 | } |
3566 | |
3567 | // Handle cases where we know a buffer's /address/ can escape. |
3568 | // Note that the above checks handle some special cases where we know that |
3569 | // even though the address escapes, it's still our responsibility to free the |
3570 | // buffer. |
3571 | if (Call->argumentsMayEscape()) |
3572 | return true; |
3573 | |
3574 | // Otherwise, assume that the function does not free memory. |
3575 | // Most system calls do not free the memory. |
3576 | return false; |
3577 | } |
3578 | |
3579 | ProgramStateRef MallocChecker::checkPointerEscape(ProgramStateRef State, |
3580 | const InvalidatedSymbols &Escaped, |
3581 | const CallEvent *Call, |
3582 | PointerEscapeKind Kind) const { |
3583 | return checkPointerEscapeAux(State, Escaped, Call, Kind, |
3584 | /*IsConstPointerEscape*/ false); |
3585 | } |
3586 | |
3587 | ProgramStateRef MallocChecker::checkConstPointerEscape(ProgramStateRef State, |
3588 | const InvalidatedSymbols &Escaped, |
3589 | const CallEvent *Call, |
3590 | PointerEscapeKind Kind) const { |
3591 | // If a const pointer escapes, it may not be free()d, but it could be deleted.
3592 | return checkPointerEscapeAux(State, Escaped, Call, Kind, |
3593 | /*IsConstPointerEscape*/ true); |
3594 | } |
3595 | |
3596 | static bool checkIfNewOrNewArrayFamily(const RefState *RS) { |
3597 | return (RS->getAllocationFamily().Kind == AF_CXXNewArray || |
3598 | RS->getAllocationFamily().Kind == AF_CXXNew); |
3599 | } |
3600 | |
3601 | ProgramStateRef MallocChecker::checkPointerEscapeAux( |
3602 | ProgramStateRef State, const InvalidatedSymbols &Escaped, |
3603 | const CallEvent *Call, PointerEscapeKind Kind, |
3604 | bool IsConstPointerEscape) const { |
3605 | // If we know that the call does not free memory, or we want to process the |
3606 | // call later, keep tracking the top level arguments. |
3607 | SymbolRef EscapingSymbol = nullptr; |
3608 | if (Kind == PSK_DirectEscapeOnCall && |
3609 | !mayFreeAnyEscapedMemoryOrIsModeledExplicitly(Call, State, |
3610 | EscapingSymbol) && |
3611 | !EscapingSymbol) { |
3612 | return State; |
3613 | } |
3614 | |
3615 | for (SymbolRef sym : Escaped) { |
3616 | if (EscapingSymbol && EscapingSymbol != sym) |
3617 | continue; |
3618 | |
3619 | if (const RefState *RS = State->get<RegionState>(key: sym)) |
3620 | if (RS->isAllocated() || RS->isAllocatedOfSizeZero()) |
3621 | if (!IsConstPointerEscape || checkIfNewOrNewArrayFamily(RS)) |
3622 | State = State->set<RegionState>(K: sym, E: RefState::getEscaped(RS)); |
3623 | } |
3624 | return State; |
3625 | } |
3626 | |
3627 | bool MallocChecker::isArgZERO_SIZE_PTR(ProgramStateRef State, CheckerContext &C, |
3628 | SVal ArgVal) const { |
3629 | if (!KernelZeroSizePtrValue) |
3630 | KernelZeroSizePtrValue = |
3631 | tryExpandAsInteger(Macro: "ZERO_SIZE_PTR" , PP: C.getPreprocessor()); |
3632 | |
3633 | const llvm::APSInt *ArgValKnown = |
3634 | C.getSValBuilder().getKnownValue(state: State, val: ArgVal); |
3635 | return ArgValKnown && *KernelZeroSizePtrValue && |
3636 | ArgValKnown->getSExtValue() == **KernelZeroSizePtrValue; |
3637 | } |
3638 | |
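// Find the symbol whose entry disappeared from the ReallocPairs map between
// the previous and the current state, i.e. the reallocation that failed.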
3639 | static SymbolRef findFailedReallocSymbol(ProgramStateRef currState, |
3640 | ProgramStateRef prevState) { |
3641 | ReallocPairsTy currMap = currState->get<ReallocPairs>(); |
3642 | ReallocPairsTy prevMap = prevState->get<ReallocPairs>(); |
3643 | |
3644 | for (const ReallocPairsTy::value_type &Pair : prevMap) { |
3645 | SymbolRef sym = Pair.first; |
3646 | if (!currMap.lookup(K: sym)) |
3647 | return sym; |
3648 | } |
3649 | |
3650 | return nullptr; |
3651 | } |
3652 | |
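// Guess from the class name whether this destructor belongs to a
// reference-counting smart pointer; used to suppress likely false
// positives.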
3653 | static bool isReferenceCountingPointerDestructor(const CXXDestructorDecl *DD) { |
3654 | if (const IdentifierInfo *II = DD->getParent()->getIdentifier()) { |
3655 | StringRef N = II->getName(); |
3656 | if (N.contains_insensitive(Other: "ptr" ) || N.contains_insensitive(Other: "pointer" )) { |
3657 | if (N.contains_insensitive(Other: "ref" ) || N.contains_insensitive(Other: "cnt" ) || |
3658 | N.contains_insensitive(Other: "intrusive" ) || |
3659 | N.contains_insensitive(Other: "shared" ) || N.ends_with_insensitive(Suffix: "rc" )) { |
3660 | return true; |
3661 | } |
3662 | } |
3663 | } |
3664 | return false; |
3665 | } |
3666 | |
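// Attach path notes such as "Memory is allocated" / "Memory is released" to
// the report, and invalidate reports that look like releases performed by
// reference-counting smart pointers or atomics.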
3667 | PathDiagnosticPieceRef MallocBugVisitor::VisitNode(const ExplodedNode *N, |
3668 | BugReporterContext &BRC, |
3669 | PathSensitiveBugReport &BR) { |
3670 | ProgramStateRef state = N->getState(); |
3671 | ProgramStateRef statePrev = N->getFirstPred()->getState(); |
3672 | |
3673 | const RefState *RSCurr = state->get<RegionState>(key: Sym); |
3674 | const RefState *RSPrev = statePrev->get<RegionState>(key: Sym); |
3675 | |
3676 | const Stmt *S = N->getStmtForDiagnostics(); |
3677 | // When dealing with containers, we sometimes want to give a note |
3678 | // even if the statement is missing. |
3679 | if (!S && (!RSCurr || RSCurr->getAllocationFamily().Kind != AF_InnerBuffer)) |
3680 | return nullptr; |
3681 | |
3682 | const LocationContext *CurrentLC = N->getLocationContext(); |
3683 | |
3684 | // If we find an atomic fetch_add or fetch_sub within the function in which |
3685 | // the pointer was released (before the release), this is likely a release |
3686 | // point of a reference-counted object (like a shared pointer).
3687 | // |
3688 | // Because we don't model atomics, and also because we don't know that the |
3689 | // original reference count is positive, we should not report use-after-frees |
3690 | // on objects deleted in such functions. This can probably be improved |
3691 | // through better shared pointer modeling. |
3692 | if (ReleaseFunctionLC && (ReleaseFunctionLC == CurrentLC || |
3693 | ReleaseFunctionLC->isParentOf(LC: CurrentLC))) { |
3694 | if (const auto *AE = dyn_cast<AtomicExpr>(Val: S)) { |
3695 | // Check for manual use of atomic builtins. |
3696 | AtomicExpr::AtomicOp Op = AE->getOp(); |
3697 | if (Op == AtomicExpr::AO__c11_atomic_fetch_add || |
3698 | Op == AtomicExpr::AO__c11_atomic_fetch_sub) { |
3699 | BR.markInvalid(Tag: getTag(), Data: S); |
3700 | // After report is considered invalid there is no need to proceed |
3701 | // further.
3702 | return nullptr; |
3703 | } |
3704 | } else if (const auto *CE = dyn_cast<CallExpr>(Val: S)) { |
3705 | // Check for `std::atomic` and such. This covers both regular method calls |
3706 | // and operator calls. |
3707 | if (const auto *MD = |
3708 | dyn_cast_or_null<CXXMethodDecl>(Val: CE->getDirectCallee())) { |
3709 | const CXXRecordDecl *RD = MD->getParent(); |
3710 | // A bit wobbly with ".contains()" because it may be like |
3711 | // "__atomic_base" or something. |
3712 | if (StringRef(RD->getNameAsString()).contains(Other: "atomic" )) { |
3713 | BR.markInvalid(Tag: getTag(), Data: S); |
3714 | // After report is considered invalid there is no need to proceed |
3715 | // further.
3716 | return nullptr; |
3717 | } |
3718 | } |
3719 | } |
3720 | } |
3721 | |
3722 | // FIXME: We will eventually need to handle non-statement-based events |
3723 | // (__attribute__((cleanup))). |
3724 | |
3725 | // Find out if this is an interesting point and what is the kind. |
3726 | StringRef Msg; |
3727 | std::unique_ptr<StackHintGeneratorForSymbol> StackHint = nullptr; |
3728 | SmallString<256> Buf; |
3729 | llvm::raw_svector_ostream OS(Buf); |
3730 | |
3731 | if (Mode == Normal) { |
3732 | if (isAllocated(RSCurr, RSPrev, Stmt: S)) { |
3733 | Msg = "Memory is allocated" ; |
3734 | StackHint = std::make_unique<StackHintGeneratorForSymbol>( |
3735 | args&: Sym, args: "Returned allocated memory" ); |
3736 | } else if (isReleased(RSCurr, RSPrev, Stmt: S)) { |
3737 | const auto Family = RSCurr->getAllocationFamily(); |
3738 | switch (Family.Kind) { |
3739 | case AF_Alloca: |
3740 | case AF_Malloc: |
3741 | case AF_Custom: |
3742 | case AF_CXXNew: |
3743 | case AF_CXXNewArray: |
3744 | case AF_IfNameIndex: |
3745 | Msg = "Memory is released" ; |
3746 | StackHint = std::make_unique<StackHintGeneratorForSymbol>( |
3747 | args&: Sym, args: "Returning; memory was released" ); |
3748 | break; |
3749 | case AF_InnerBuffer: { |
3750 | const MemRegion *ObjRegion = |
3751 | allocation_state::getContainerObjRegion(State: statePrev, Sym); |
3752 | const auto *TypedRegion = cast<TypedValueRegion>(Val: ObjRegion); |
3753 | QualType ObjTy = TypedRegion->getValueType(); |
3754 | OS << "Inner buffer of '" << ObjTy << "' " ; |
3755 | |
3756 | if (N->getLocation().getKind() == ProgramPoint::PostImplicitCallKind) { |
3757 | OS << "deallocated by call to destructor" ; |
3758 | StackHint = std::make_unique<StackHintGeneratorForSymbol>( |
3759 | args&: Sym, args: "Returning; inner buffer was deallocated" ); |
3760 | } else { |
3761 | OS << "reallocated by call to '" ; |
3762 | const Stmt *S = RSCurr->getStmt(); |
3763 | if (const auto *MemCallE = dyn_cast<CXXMemberCallExpr>(Val: S)) { |
3764 | OS << MemCallE->getMethodDecl()->getDeclName(); |
3765 | } else if (const auto *OpCallE = dyn_cast<CXXOperatorCallExpr>(Val: S)) { |
3766 | OS << OpCallE->getDirectCallee()->getDeclName(); |
3767 | } else if (const auto *CallE = dyn_cast<CallExpr>(Val: S)) { |
3768 | auto &CEMgr = BRC.getStateManager().getCallEventManager(); |
3769 | CallEventRef<> Call = |
3770 | CEMgr.getSimpleCall(E: CallE, State: state, LCtx: CurrentLC, ElemRef: {nullptr, 0}); |
3771 | if (const auto *D = dyn_cast_or_null<NamedDecl>(Val: Call->getDecl())) |
3772 | OS << D->getDeclName(); |
3773 | else |
3774 | OS << "unknown" ; |
3775 | } |
3776 | OS << "'" ; |
3777 | StackHint = std::make_unique<StackHintGeneratorForSymbol>( |
3778 | args&: Sym, args: "Returning; inner buffer was reallocated" ); |
3779 | } |
3780 | Msg = OS.str(); |
3781 | break; |
3782 | } |
3783 | case AF_None: |
3784 | assert(false && "Unhandled allocation family!" ); |
3785 | return nullptr; |
3786 | } |
3787 | |
3788 | // Save the first destructor/function as release point. |
3789 | assert(!ReleaseFunctionLC && "There should be only one release point" ); |
3790 | ReleaseFunctionLC = CurrentLC->getStackFrame(); |
3791 | |
3792 | // See if we're releasing memory while inlining a destructor that |
3793 | // decrements reference counters (or one of its callees).
3794 | // This turns on various common false positive suppressions. |
3795 | for (const LocationContext *LC = CurrentLC; LC; LC = LC->getParent()) { |
3796 | if (const auto *DD = dyn_cast<CXXDestructorDecl>(Val: LC->getDecl())) { |
3797 | if (isReferenceCountingPointerDestructor(DD)) { |
3798 | // This immediately looks like a reference-counting destructor. |
3799 | // We're bad at guessing the original reference count of the |
3800 | // object, so suppress the report for now. |
3801 | BR.markInvalid(Tag: getTag(), Data: DD); |
3802 | |
3803 | // After report is considered invalid there is no need to proceed |
3804 | // further.
3805 | return nullptr; |
3806 | } |
3807 | |
3808 | // Switch suspicion to the outer destructor to catch patterns like:
3809 | // (note that the class name is distorted to bypass
3810 | // isReferenceCountingPointerDestructor() logic)
3811 | // |
3812 | // SmartPointr::~SmartPointr() { |
3813 | // if (refcount.fetch_sub(1) == 1) |
3814 | // release_resources(); |
3815 | // } |
3816 | // void SmartPointr::release_resources() { |
3817 | // free(buffer); |
3818 | // } |
3819 | // |
3820 | // This way ReleaseFunctionLC will point to outermost destructor and |
3821 | // it would be possible to catch wider range of FP. |
3822 | // |
3823 | // NOTE: it would be great to support something like that in C, since
3824 | // currently patterns like the following won't be suppressed:
3825 | // |
3826 | // void doFree(struct Data *data) { free(data); } |
3827 | // void putData(struct Data *data) |
3828 | // { |
3829 | // if (refPut(data)) |
3830 | // doFree(data); |
3831 | // } |
3832 | ReleaseFunctionLC = LC->getStackFrame(); |
3833 | } |
3834 | } |
3835 | |
3836 | } else if (isRelinquished(RSCurr, RSPrev, Stmt: S)) { |
3837 | Msg = "Memory ownership is transferred" ; |
3838 | StackHint = std::make_unique<StackHintGeneratorForSymbol>(args&: Sym, args: "" ); |
3839 | } else if (hasReallocFailed(RSCurr, RSPrev, Stmt: S)) { |
3840 | Mode = ReallocationFailed; |
3841 | Msg = "Reallocation failed" ; |
3842 | StackHint = std::make_unique<StackHintGeneratorForReallocationFailed>( |
3843 | args&: Sym, args: "Reallocation failed" ); |
3844 | |
3845 | if (SymbolRef sym = findFailedReallocSymbol(currState: state, prevState: statePrev)) { |
3846 | // Is it possible to fail two reallocs WITHOUT testing in between? |
3847 | assert((!FailedReallocSymbol || FailedReallocSymbol == sym) && |
3848 | "We only support one failed realloc at a time." ); |
3849 | BR.markInteresting(sym); |
3850 | FailedReallocSymbol = sym; |
3851 | } |
3852 | } |
3853 | |
3854 | // We are in a special mode if a reallocation failed later in the path. |
3855 | } else if (Mode == ReallocationFailed) { |
3856 | assert(FailedReallocSymbol && "No symbol to look for." ); |
3857 | |
3858 | // Is this the first appearance of the reallocated symbol?
3859 | if (!statePrev->get<RegionState>(key: FailedReallocSymbol)) { |
3860 | // We're at the reallocation point. |
3861 | Msg = "Attempt to reallocate memory" ; |
3862 | StackHint = std::make_unique<StackHintGeneratorForSymbol>( |
3863 | args&: Sym, args: "Returned reallocated memory" ); |
3864 | FailedReallocSymbol = nullptr; |
3865 | Mode = Normal; |
3866 | } |
3867 | } |
3868 | |
3869 | if (Msg.empty()) { |
3870 | assert(!StackHint); |
3871 | return nullptr; |
3872 | } |
3873 | |
3874 | assert(StackHint); |
3875 | |
3876 | // Generate the extra diagnostic. |
3877 | PathDiagnosticLocation Pos; |
3878 | if (!S) { |
3879 | assert(RSCurr->getAllocationFamily().Kind == AF_InnerBuffer); |
3880 | auto PostImplCall = N->getLocation().getAs<PostImplicitCall>(); |
3881 | if (!PostImplCall) |
3882 | return nullptr; |
3883 | Pos = PathDiagnosticLocation(PostImplCall->getLocation(), |
3884 | BRC.getSourceManager()); |
3885 | } else { |
3886 | Pos = PathDiagnosticLocation(S, BRC.getSourceManager(), |
3887 | N->getLocationContext()); |
3888 | } |
3889 | |
3890 | auto P = std::make_shared<PathDiagnosticEventPiece>(args&: Pos, args&: Msg, args: true); |
3891 | BR.addCallStackHint(Piece: P, StackHint: std::move(StackHint)); |
3892 | return P; |
3893 | } |
3894 | |
3895 | void MallocChecker::printState(raw_ostream &Out, ProgramStateRef State, |
3896 | const char *NL, const char *Sep) const { |
3897 | |
3898 | RegionStateTy RS = State->get<RegionState>(); |
3899 | |
3900 | if (!RS.isEmpty()) { |
3901 | Out << Sep << "MallocChecker :" << NL; |
3902 | for (auto [Sym, Data] : RS) { |
3903 | const RefState *RefS = State->get<RegionState>(key: Sym); |
3904 | AllocationFamily Family = RefS->getAllocationFamily(); |
3905 | std::optional<MallocChecker::CheckKind> CheckKind = |
3906 | getCheckIfTracked(Family); |
3907 | if (!CheckKind) |
3908 | CheckKind = getCheckIfTracked(Family, IsALeakCheck: true); |
3909 | |
3910 | Sym->dumpToStream(os&: Out); |
3911 | Out << " : " ; |
3912 | Data.dump(OS&: Out); |
3913 | if (CheckKind) |
3914 | Out << " (" << CheckNames[*CheckKind] << ")" ; |
3915 | Out << NL; |
3916 | } |
3917 | } |
3918 | } |
3919 | |
3920 | namespace clang { |
3921 | namespace ento { |
3922 | namespace allocation_state { |
3923 | |
3924 | ProgramStateRef |
3925 | markReleased(ProgramStateRef State, SymbolRef Sym, const Expr *Origin) { |
3926 | AllocationFamily Family(AF_InnerBuffer); |
3927 | return State->set<RegionState>(K: Sym, E: RefState::getReleased(family: Family, s: Origin)); |
3928 | } |
3929 | |
3930 | } // end namespace allocation_state |
3931 | } // end namespace ento |
3932 | } // end namespace clang |
3933 | |
3934 | // Intended to be used in InnerPointerChecker to register the part of |
3935 | // MallocChecker connected to it. |
3936 | void ento::registerInnerPointerCheckerAux(CheckerManager &mgr) { |
3937 | MallocChecker *checker = mgr.getChecker<MallocChecker>(); |
3938 | checker->ChecksEnabled[MallocChecker::CK_InnerPointerChecker] = true; |
3939 | checker->CheckNames[MallocChecker::CK_InnerPointerChecker] = |
3940 | mgr.getCurrentCheckerName(); |
3941 | } |
3942 | |
3943 | void ento::registerDynamicMemoryModeling(CheckerManager &mgr) { |
3944 | auto *checker = mgr.registerChecker<MallocChecker>(); |
3945 | checker->ShouldIncludeOwnershipAnnotatedFunctions = |
3946 | mgr.getAnalyzerOptions().getCheckerBooleanOption(C: checker, OptionName: "Optimistic" ); |
3947 | checker->ShouldRegisterNoOwnershipChangeVisitor = |
3948 | mgr.getAnalyzerOptions().getCheckerBooleanOption( |
3949 | C: checker, OptionName: "AddNoOwnershipChangeNotes" ); |
3950 | } |
3951 | |
3952 | bool ento::shouldRegisterDynamicMemoryModeling(const CheckerManager &mgr) { |
3953 | return true; |
3954 | } |
3955 | |
3956 | #define REGISTER_CHECKER(name) \ |
3957 | void ento::register##name(CheckerManager &mgr) { \ |
3958 | MallocChecker *checker = mgr.getChecker<MallocChecker>(); \ |
3959 | checker->ChecksEnabled[MallocChecker::CK_##name] = true; \ |
3960 | checker->CheckNames[MallocChecker::CK_##name] = \ |
3961 | mgr.getCurrentCheckerName(); \ |
3962 | } \ |
3963 | \ |
3964 | bool ento::shouldRegister##name(const CheckerManager &mgr) { return true; } |
3965 | |
3966 | REGISTER_CHECKER(MallocChecker) |
3967 | REGISTER_CHECKER(NewDeleteChecker) |
3968 | REGISTER_CHECKER(NewDeleteLeaksChecker) |
3969 | REGISTER_CHECKER(MismatchedDeallocatorChecker) |
3970 | REGISTER_CHECKER(TaintedAllocChecker) |
3971 | |