1 | //=== MallocChecker.cpp - A malloc/free checker -------------------*- C++ -*--// |
2 | // |
3 | // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. |
4 | // See https://llvm.org/LICENSE.txt for license information. |
5 | // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception |
6 | // |
7 | //===----------------------------------------------------------------------===// |
8 | // |
9 | // This file defines a variety of memory management related checkers, such as |
10 | // leak, double free, and use-after-free. |
11 | // |
12 | // The following checkers are defined here: |
13 | // |
//   * MallocChecker
//       Despite its name, it models all sorts of memory allocations and
//       de- or reallocation, including but not limited to malloc, free,
//       realloc, new, delete. It also reports on a variety of memory misuse
//       errors.
//       Many other checkers interact very closely with this checker; in fact,
//       most are merely options to this one. Other checkers may register
//       MallocChecker, but do not enable MallocChecker's reports (more details
//       to follow around its field, ChecksEnabled).
//       It also has a boolean "Optimistic" checker option, which, if set to
//       true, will cause the checker to model user-defined memory management
//       related functions annotated via the attributes ownership_takes,
//       ownership_holds and ownership_returns.
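//       For illustration, such annotations look like the following (these
//       prototypes mirror the examples documented later in this file):
//         void __attribute((ownership_returns(malloc, 1))) *my_malloc(size_t);
//         void __attribute((ownership_takes(malloc, 1))) my_free(void *);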
27 | // |
//   * NewDeleteChecker
//       Enables the modeling of new, new[], delete, delete[] in MallocChecker,
//       and checks for related double-free and use-after-free errors.
//
//   * NewDeleteLeaksChecker
//       Checks for leaks related to new, new[], delete, delete[].
//       Depends on NewDeleteChecker.
//
//   * MismatchedDeallocatorChecker
//       Enables checking whether memory is deallocated with the corresponding
//       allocation function in MallocChecker, e.g. that malloc()-allocated
//       regions are freed only by free(), new by delete, and new[] by delete[].
//
// InnerPointerChecker interacts very closely with MallocChecker, but unlike
// the above checkers, it has its own file, hence the many InnerPointerChecker
// related headers and non-static functions.
44 | // |
45 | //===----------------------------------------------------------------------===// |
46 | |
47 | #include "AllocationState.h" |
48 | #include "InterCheckerAPI.h" |
49 | #include "NoOwnershipChangeVisitor.h" |
50 | #include "clang/AST/Attr.h" |
51 | #include "clang/AST/DeclCXX.h" |
52 | #include "clang/AST/DeclTemplate.h" |
53 | #include "clang/AST/Expr.h" |
54 | #include "clang/AST/ExprCXX.h" |
55 | #include "clang/AST/ParentMap.h" |
56 | #include "clang/ASTMatchers/ASTMatchFinder.h" |
57 | #include "clang/ASTMatchers/ASTMatchers.h" |
58 | #include "clang/Analysis/ProgramPoint.h" |
59 | #include "clang/Basic/LLVM.h" |
60 | #include "clang/Basic/SourceManager.h" |
61 | #include "clang/Basic/TargetInfo.h" |
62 | #include "clang/Lex/Lexer.h" |
63 | #include "clang/StaticAnalyzer/Checkers/BuiltinCheckerRegistration.h" |
64 | #include "clang/StaticAnalyzer/Checkers/Taint.h" |
65 | #include "clang/StaticAnalyzer/Core/BugReporter/BugType.h" |
66 | #include "clang/StaticAnalyzer/Core/BugReporter/CommonBugCategories.h" |
67 | #include "clang/StaticAnalyzer/Core/Checker.h" |
68 | #include "clang/StaticAnalyzer/Core/CheckerManager.h" |
69 | #include "clang/StaticAnalyzer/Core/PathSensitive/CallDescription.h" |
70 | #include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h" |
71 | #include "clang/StaticAnalyzer/Core/PathSensitive/CheckerContext.h" |
72 | #include "clang/StaticAnalyzer/Core/PathSensitive/CheckerHelpers.h" |
73 | #include "clang/StaticAnalyzer/Core/PathSensitive/DynamicExtent.h" |
74 | #include "clang/StaticAnalyzer/Core/PathSensitive/ExplodedGraph.h" |
75 | #include "clang/StaticAnalyzer/Core/PathSensitive/ProgramState.h" |
76 | #include "clang/StaticAnalyzer/Core/PathSensitive/ProgramStateTrait.h" |
77 | #include "clang/StaticAnalyzer/Core/PathSensitive/ProgramState_Fwd.h" |
78 | #include "clang/StaticAnalyzer/Core/PathSensitive/SVals.h" |
79 | #include "clang/StaticAnalyzer/Core/PathSensitive/StoreRef.h" |
80 | #include "clang/StaticAnalyzer/Core/PathSensitive/SymbolManager.h" |
81 | #include "llvm/ADT/STLExtras.h" |
82 | #include "llvm/ADT/SetOperations.h" |
83 | #include "llvm/ADT/StringExtras.h" |
84 | #include "llvm/Support/Casting.h" |
85 | #include "llvm/Support/Compiler.h" |
86 | #include "llvm/Support/ErrorHandling.h" |
87 | #include "llvm/Support/raw_ostream.h" |
88 | #include <functional> |
89 | #include <optional> |
90 | #include <utility> |
91 | |
92 | using namespace clang; |
93 | using namespace ento; |
94 | using namespace std::placeholders; |
95 | |
96 | //===----------------------------------------------------------------------===// |
97 | // The types of allocation we're modeling. This is used to check whether a |
// dynamically allocated object is deallocated with the correct function, e.g.
// that operator delete is not used on an object created by malloc(), and that
// alloca regions are never deallocated manually.
101 | //===----------------------------------------------------------------------===// |
102 | |
103 | namespace { |
104 | |
105 | // Used to check correspondence between allocators and deallocators. |
106 | enum AllocationFamily { |
107 | AF_None, |
108 | AF_Malloc, |
109 | AF_CXXNew, |
110 | AF_CXXNewArray, |
111 | AF_IfNameIndex, |
112 | AF_Alloca, |
113 | AF_InnerBuffer |
114 | }; |
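// For instance, the AF_Malloc family covers malloc()/calloc()/realloc() memory
// that must be released with free(), while AF_CXXNew and AF_CXXNewArray cover
// memory from 'new' and 'new[]' that must be released with 'delete' and
// 'delete[]' respectively; freeing across families is reported as a mismatch.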
115 | |
116 | } // end of anonymous namespace |
117 | |
118 | /// Print names of allocators and deallocators. |
119 | /// |
120 | /// \returns true on success. |
121 | static bool printMemFnName(raw_ostream &os, CheckerContext &C, const Expr *E); |
122 | |
123 | /// Print expected name of an allocator based on the deallocator's family |
124 | /// derived from the DeallocExpr. |
125 | static void printExpectedAllocName(raw_ostream &os, AllocationFamily Family); |
126 | |
127 | /// Print expected name of a deallocator based on the allocator's |
128 | /// family. |
129 | static void printExpectedDeallocName(raw_ostream &os, AllocationFamily Family); |
130 | |
131 | //===----------------------------------------------------------------------===// |
132 | // The state of a symbol, in terms of memory management. |
133 | //===----------------------------------------------------------------------===// |
134 | |
135 | namespace { |
136 | |
137 | class RefState { |
138 | enum Kind { |
139 | // Reference to allocated memory. |
140 | Allocated, |
141 | // Reference to zero-allocated memory. |
142 | AllocatedOfSizeZero, |
143 | // Reference to released/freed memory. |
144 | Released, |
145 | // The responsibility for freeing resources has transferred from |
146 | // this reference. A relinquished symbol should not be freed. |
147 | Relinquished, |
148 | // We are no longer guaranteed to have observed all manipulations |
149 | // of this pointer/memory. For example, it could have been |
150 | // passed as a parameter to an opaque function. |
151 | Escaped |
152 | }; |
153 | |
154 | const Stmt *S; |
155 | |
156 | Kind K; |
157 | AllocationFamily Family; |
158 | |
159 | RefState(Kind k, const Stmt *s, AllocationFamily family) |
160 | : S(s), K(k), Family(family) { |
161 | assert(family != AF_None); |
162 | } |
163 | |
164 | public: |
165 | bool isAllocated() const { return K == Allocated; } |
166 | bool isAllocatedOfSizeZero() const { return K == AllocatedOfSizeZero; } |
167 | bool isReleased() const { return K == Released; } |
168 | bool isRelinquished() const { return K == Relinquished; } |
169 | bool isEscaped() const { return K == Escaped; } |
170 | AllocationFamily getAllocationFamily() const { return Family; } |
171 | const Stmt *getStmt() const { return S; } |
172 | |
173 | bool operator==(const RefState &X) const { |
174 | return K == X.K && S == X.S && Family == X.Family; |
175 | } |
176 | |
177 | static RefState getAllocated(AllocationFamily family, const Stmt *s) { |
178 | return RefState(Allocated, s, family); |
179 | } |
180 | static RefState getAllocatedOfSizeZero(const RefState *RS) { |
181 | return RefState(AllocatedOfSizeZero, RS->getStmt(), |
182 | RS->getAllocationFamily()); |
183 | } |
184 | static RefState getReleased(AllocationFamily family, const Stmt *s) { |
185 | return RefState(Released, s, family); |
186 | } |
187 | static RefState getRelinquished(AllocationFamily family, const Stmt *s) { |
188 | return RefState(Relinquished, s, family); |
189 | } |
190 | static RefState getEscaped(const RefState *RS) { |
191 | return RefState(Escaped, RS->getStmt(), RS->getAllocationFamily()); |
192 | } |
193 | |
194 | void Profile(llvm::FoldingSetNodeID &ID) const { |
    ID.AddInteger(K);
    ID.AddPointer(S);
    ID.AddInteger(Family);
198 | } |
199 | |
200 | LLVM_DUMP_METHOD void dump(raw_ostream &OS) const { |
201 | switch (K) { |
202 | #define CASE(ID) case ID: OS << #ID; break; |
203 | CASE(Allocated) |
204 | CASE(AllocatedOfSizeZero) |
205 | CASE(Released) |
206 | CASE(Relinquished) |
207 | CASE(Escaped) |
208 | } |
209 | } |
210 | |
  LLVM_DUMP_METHOD void dump() const { dump(llvm::errs()); }
212 | }; |
213 | |
214 | } // end of anonymous namespace |
215 | |
216 | REGISTER_MAP_WITH_PROGRAMSTATE(RegionState, SymbolRef, RefState) |
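// A sketch of how this program-state trait is typically queried, assuming a
// SymbolRef Sym and a ProgramStateRef State in scope (both calls appear later
// in this file):
//   const RefState *RS = State->get<RegionState>(Sym); // null if untracked
//   ProgramStateRef NewState = State->remove<RegionState>(Sym);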
217 | |
218 | /// Check if the memory associated with this symbol was released. |
219 | static bool isReleased(SymbolRef Sym, CheckerContext &C); |
220 | |
221 | /// Update the RefState to reflect the new memory allocation. |
222 | /// The optional \p RetVal parameter specifies the newly allocated pointer |
223 | /// value; if unspecified, the value of expression \p E is used. |
224 | static ProgramStateRef |
225 | MallocUpdateRefState(CheckerContext &C, const Expr *E, ProgramStateRef State, |
226 | AllocationFamily Family, |
227 | std::optional<SVal> RetVal = std::nullopt); |
228 | |
229 | //===----------------------------------------------------------------------===// |
230 | // The modeling of memory reallocation. |
231 | // |
232 | // The terminology 'toPtr' and 'fromPtr' will be used: |
233 | // toPtr = realloc(fromPtr, 20); |
234 | //===----------------------------------------------------------------------===// |
235 | |
236 | REGISTER_SET_WITH_PROGRAMSTATE(ReallocSizeZeroSymbols, SymbolRef) |
237 | |
238 | namespace { |
239 | |
240 | /// The state of 'fromPtr' after reallocation is known to have failed. |
241 | enum OwnershipAfterReallocKind { |
242 | // The symbol needs to be freed (e.g.: realloc) |
243 | OAR_ToBeFreedAfterFailure, |
244 | // The symbol has been freed (e.g.: reallocf) |
245 | OAR_FreeOnFailure, |
  // The symbol doesn't have to be freed (e.g.: we aren't sure if, how and
  // where 'fromPtr' was allocated:
248 | // void Haha(int *ptr) { |
249 | // ptr = realloc(ptr, 67); |
250 | // // ... |
251 | // } |
252 | // ). |
253 | OAR_DoNotTrackAfterFailure |
254 | }; |
255 | |
256 | /// Stores information about the 'fromPtr' symbol after reallocation. |
257 | /// |
258 | /// This is important because realloc may fail, and that needs special modeling. |
259 | /// Whether reallocation failed or not will not be known until later, so we'll |
260 | /// store whether upon failure 'fromPtr' will be freed, or needs to be freed |
261 | /// later, etc. |
262 | struct ReallocPair { |
263 | |
264 | // The 'fromPtr'. |
265 | SymbolRef ReallocatedSym; |
266 | OwnershipAfterReallocKind Kind; |
267 | |
268 | ReallocPair(SymbolRef S, OwnershipAfterReallocKind K) |
269 | : ReallocatedSym(S), Kind(K) {} |
270 | void Profile(llvm::FoldingSetNodeID &ID) const { |
    ID.AddInteger(Kind);
    ID.AddPointer(ReallocatedSym);
273 | } |
274 | bool operator==(const ReallocPair &X) const { |
275 | return ReallocatedSym == X.ReallocatedSym && |
276 | Kind == X.Kind; |
277 | } |
278 | }; |
279 | |
280 | } // end of anonymous namespace |
281 | |
282 | REGISTER_MAP_WITH_PROGRAMSTATE(ReallocPairs, SymbolRef, ReallocPair) |
283 | |
284 | /// Tells if the callee is one of the builtin new/delete operators, including |
285 | /// placement operators and other standard overloads. |
286 | static bool isStandardNewDelete(const FunctionDecl *FD); |
287 | static bool isStandardNewDelete(const CallEvent &Call) { |
  if (!Call.getDecl() || !isa<FunctionDecl>(Call.getDecl()))
    return false;
  return isStandardNewDelete(cast<FunctionDecl>(Call.getDecl()));
291 | } |
292 | |
293 | //===----------------------------------------------------------------------===// |
294 | // Definition of the MallocChecker class. |
295 | //===----------------------------------------------------------------------===// |
296 | |
297 | namespace { |
298 | |
299 | class MallocChecker |
300 | : public Checker<check::DeadSymbols, check::PointerEscape, |
301 | check::ConstPointerEscape, check::PreStmt<ReturnStmt>, |
302 | check::EndFunction, check::PreCall, check::PostCall, |
303 | check::NewAllocator, check::PostStmt<BlockExpr>, |
304 | check::PostObjCMessage, check::Location, eval::Assume> { |
305 | public: |
306 | /// In pessimistic mode, the checker assumes that it does not know which |
307 | /// functions might free the memory. |
308 | /// In optimistic mode, the checker assumes that all user-defined functions |
309 | /// which might free a pointer are annotated. |
310 | bool ShouldIncludeOwnershipAnnotatedFunctions = false; |
311 | |
312 | bool ShouldRegisterNoOwnershipChangeVisitor = false; |
313 | |
314 | /// Many checkers are essentially built into this one, so enabling them will |
315 | /// make MallocChecker perform additional modeling and reporting. |
316 | enum CheckKind { |
    /// When a subchecker is enabled but MallocChecker isn't, model memory
    /// management but do not emit the warnings that would only be emitted
    /// when MallocChecker itself is enabled.
320 | CK_MallocChecker, |
321 | CK_NewDeleteChecker, |
322 | CK_NewDeleteLeaksChecker, |
323 | CK_MismatchedDeallocatorChecker, |
324 | CK_InnerPointerChecker, |
325 | CK_TaintedAllocChecker, |
326 | CK_NumCheckKinds |
327 | }; |
328 | |
329 | using LeakInfo = std::pair<const ExplodedNode *, const MemRegion *>; |
330 | |
331 | bool ChecksEnabled[CK_NumCheckKinds] = {false}; |
332 | CheckerNameRef CheckNames[CK_NumCheckKinds]; |
333 | |
334 | void checkPreCall(const CallEvent &Call, CheckerContext &C) const; |
335 | void checkPostCall(const CallEvent &Call, CheckerContext &C) const; |
336 | void checkNewAllocator(const CXXAllocatorCall &Call, CheckerContext &C) const; |
337 | void checkPostObjCMessage(const ObjCMethodCall &Call, CheckerContext &C) const; |
338 | void checkPostStmt(const BlockExpr *BE, CheckerContext &C) const; |
339 | void checkDeadSymbols(SymbolReaper &SymReaper, CheckerContext &C) const; |
340 | void checkPreStmt(const ReturnStmt *S, CheckerContext &C) const; |
341 | void checkEndFunction(const ReturnStmt *S, CheckerContext &C) const; |
342 | ProgramStateRef evalAssume(ProgramStateRef state, SVal Cond, |
343 | bool Assumption) const; |
344 | void checkLocation(SVal l, bool isLoad, const Stmt *S, |
345 | CheckerContext &C) const; |
346 | |
347 | ProgramStateRef checkPointerEscape(ProgramStateRef State, |
348 | const InvalidatedSymbols &Escaped, |
349 | const CallEvent *Call, |
350 | PointerEscapeKind Kind) const; |
351 | ProgramStateRef checkConstPointerEscape(ProgramStateRef State, |
352 | const InvalidatedSymbols &Escaped, |
353 | const CallEvent *Call, |
354 | PointerEscapeKind Kind) const; |
355 | |
356 | void printState(raw_ostream &Out, ProgramStateRef State, |
357 | const char *NL, const char *Sep) const override; |
358 | |
359 | private: |
360 | mutable std::unique_ptr<BugType> BT_DoubleFree[CK_NumCheckKinds]; |
361 | mutable std::unique_ptr<BugType> BT_DoubleDelete; |
362 | mutable std::unique_ptr<BugType> BT_Leak[CK_NumCheckKinds]; |
363 | mutable std::unique_ptr<BugType> BT_UseFree[CK_NumCheckKinds]; |
364 | mutable std::unique_ptr<BugType> BT_BadFree[CK_NumCheckKinds]; |
365 | mutable std::unique_ptr<BugType> BT_FreeAlloca[CK_NumCheckKinds]; |
366 | mutable std::unique_ptr<BugType> BT_MismatchedDealloc; |
367 | mutable std::unique_ptr<BugType> BT_OffsetFree[CK_NumCheckKinds]; |
368 | mutable std::unique_ptr<BugType> BT_UseZerroAllocated[CK_NumCheckKinds]; |
369 | mutable std::unique_ptr<BugType> BT_TaintedAlloc; |
370 | |
371 | #define CHECK_FN(NAME) \ |
372 | void NAME(const CallEvent &Call, CheckerContext &C) const; |
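// For example, CHECK_FN(checkFree) expands to the declaration
//   void checkFree(const CallEvent &Call, CheckerContext &C) const;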
373 | |
374 | CHECK_FN(checkFree) |
375 | CHECK_FN(checkIfNameIndex) |
376 | CHECK_FN(checkBasicAlloc) |
377 | CHECK_FN(checkKernelMalloc) |
378 | CHECK_FN(checkCalloc) |
379 | CHECK_FN(checkAlloca) |
380 | CHECK_FN(checkStrdup) |
381 | CHECK_FN(checkIfFreeNameIndex) |
382 | CHECK_FN(checkCXXNewOrCXXDelete) |
383 | CHECK_FN(checkGMalloc0) |
384 | CHECK_FN(checkGMemdup) |
385 | CHECK_FN(checkGMallocN) |
386 | CHECK_FN(checkGMallocN0) |
387 | CHECK_FN(preGetdelim) |
388 | CHECK_FN(checkGetdelim) |
389 | CHECK_FN(checkReallocN) |
390 | CHECK_FN(checkOwnershipAttr) |
391 | |
392 | void checkRealloc(const CallEvent &Call, CheckerContext &C, |
393 | bool ShouldFreeOnFail) const; |
394 | |
395 | using CheckFn = std::function<void(const MallocChecker *, |
396 | const CallEvent &Call, CheckerContext &C)>; |
397 | |
398 | const CallDescriptionMap<CheckFn> PreFnMap{ |
399 | // NOTE: the following CallDescription also matches the C++ standard |
400 | // library function std::getline(); the callback will filter it out. |
      {{CDM::CLibrary, {"getline"}, 3}, &MallocChecker::preGetdelim},
      {{CDM::CLibrary, {"getdelim"}, 4}, &MallocChecker::preGetdelim},
403 | }; |
404 | |
405 | const CallDescriptionMap<CheckFn> FreeingMemFnMap{ |
      {{CDM::CLibrary, {"free"}, 1}, &MallocChecker::checkFree},
      {{CDM::CLibrary, {"if_freenameindex"}, 1},
       &MallocChecker::checkIfFreeNameIndex},
      {{CDM::CLibrary, {"kfree"}, 1}, &MallocChecker::checkFree},
      {{CDM::CLibrary, {"g_free"}, 1}, &MallocChecker::checkFree},
411 | }; |
412 | |
413 | bool isFreeingCall(const CallEvent &Call) const; |
414 | static bool isFreeingOwnershipAttrCall(const FunctionDecl *Func); |
415 | |
416 | friend class NoMemOwnershipChangeVisitor; |
417 | |
418 | CallDescriptionMap<CheckFn> AllocatingMemFnMap{ |
      {{CDM::CLibrary, {"alloca"}, 1}, &MallocChecker::checkAlloca},
      {{CDM::CLibrary, {"_alloca"}, 1}, &MallocChecker::checkAlloca},
      // The line for "alloca" also covers "__builtin_alloca", but the
      // _with_align variant must be listed separately because it takes an
      // extra argument:
      {{CDM::CLibrary, {"__builtin_alloca_with_align"}, 2},
       &MallocChecker::checkAlloca},
      {{CDM::CLibrary, {"malloc"}, 1}, &MallocChecker::checkBasicAlloc},
      {{CDM::CLibrary, {"malloc"}, 3}, &MallocChecker::checkKernelMalloc},
      {{CDM::CLibrary, {"calloc"}, 2}, &MallocChecker::checkCalloc},
      {{CDM::CLibrary, {"valloc"}, 1}, &MallocChecker::checkBasicAlloc},
      {{CDM::CLibrary, {"strndup"}, 2}, &MallocChecker::checkStrdup},
      {{CDM::CLibrary, {"strdup"}, 1}, &MallocChecker::checkStrdup},
      {{CDM::CLibrary, {"_strdup"}, 1}, &MallocChecker::checkStrdup},
      {{CDM::CLibrary, {"kmalloc"}, 2}, &MallocChecker::checkKernelMalloc},
      {{CDM::CLibrary, {"if_nameindex"}, 1}, &MallocChecker::checkIfNameIndex},
      {{CDM::CLibrary, {"wcsdup"}, 1}, &MallocChecker::checkStrdup},
      {{CDM::CLibrary, {"_wcsdup"}, 1}, &MallocChecker::checkStrdup},
      {{CDM::CLibrary, {"g_malloc"}, 1}, &MallocChecker::checkBasicAlloc},
      {{CDM::CLibrary, {"g_malloc0"}, 1}, &MallocChecker::checkGMalloc0},
      {{CDM::CLibrary, {"g_try_malloc"}, 1}, &MallocChecker::checkBasicAlloc},
      {{CDM::CLibrary, {"g_try_malloc0"}, 1}, &MallocChecker::checkGMalloc0},
      {{CDM::CLibrary, {"g_memdup"}, 2}, &MallocChecker::checkGMemdup},
      {{CDM::CLibrary, {"g_malloc_n"}, 2}, &MallocChecker::checkGMallocN},
      {{CDM::CLibrary, {"g_malloc0_n"}, 2}, &MallocChecker::checkGMallocN0},
      {{CDM::CLibrary, {"g_try_malloc_n"}, 2}, &MallocChecker::checkGMallocN},
      {{CDM::CLibrary, {"g_try_malloc0_n"}, 2}, &MallocChecker::checkGMallocN0},
446 | }; |
447 | |
448 | CallDescriptionMap<CheckFn> ReallocatingMemFnMap{ |
      {{CDM::CLibrary, {"realloc"}, 2},
       std::bind(&MallocChecker::checkRealloc, _1, _2, _3, false)},
      {{CDM::CLibrary, {"reallocf"}, 2},
       std::bind(&MallocChecker::checkRealloc, _1, _2, _3, true)},
      {{CDM::CLibrary, {"g_realloc"}, 2},
       std::bind(&MallocChecker::checkRealloc, _1, _2, _3, false)},
      {{CDM::CLibrary, {"g_try_realloc"}, 2},
       std::bind(&MallocChecker::checkRealloc, _1, _2, _3, false)},
      {{CDM::CLibrary, {"g_realloc_n"}, 3}, &MallocChecker::checkReallocN},
      {{CDM::CLibrary, {"g_try_realloc_n"}, 3}, &MallocChecker::checkReallocN},

      // NOTE: the following CallDescription also matches the C++ standard
      // library function std::getline(); the callback will filter it out.
      {{CDM::CLibrary, {"getline"}, 3}, &MallocChecker::checkGetdelim},
      {{CDM::CLibrary, {"getdelim"}, 4}, &MallocChecker::checkGetdelim},
464 | }; |
465 | |
466 | bool isMemCall(const CallEvent &Call) const; |
467 | void reportTaintBug(StringRef Msg, ProgramStateRef State, CheckerContext &C, |
468 | llvm::ArrayRef<SymbolRef> TaintedSyms, |
469 | AllocationFamily Family) const; |
470 | |
471 | void checkTaintedness(CheckerContext &C, const CallEvent &Call, |
472 | const SVal SizeSVal, ProgramStateRef State, |
473 | AllocationFamily Family) const; |
474 | |
  // TODO: Remove mutable by moving the initialization to the registry function.
476 | mutable std::optional<uint64_t> KernelZeroFlagVal; |
477 | |
478 | using KernelZeroSizePtrValueTy = std::optional<int>; |
  /// Store the value of the macro called `ZERO_SIZE_PTR`.
  /// The value is initialized at first use. Before first use the outer
  /// Optional is empty; afterwards it contains another Optional that indicates
  /// whether the macro value could be determined, and if yes, the value itself.
483 | mutable std::optional<KernelZeroSizePtrValueTy> KernelZeroSizePtrValue; |
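  // A sketch of the states this cache can take (the concrete number below is
  // illustrative):
  //   std::nullopt          -- not yet computed;
  //   inner std::nullopt    -- computed, but the macro value was not found;
  //   inner value (e.g. 16) -- computed, and the macro value is known.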
484 | |
485 | /// Process C++ operator new()'s allocation, which is the part of C++ |
486 | /// new-expression that goes before the constructor. |
487 | [[nodiscard]] ProgramStateRef |
488 | processNewAllocation(const CXXAllocatorCall &Call, CheckerContext &C, |
489 | AllocationFamily Family) const; |
490 | |
491 | /// Perform a zero-allocation check. |
492 | /// |
493 | /// \param [in] Call The expression that allocates memory. |
494 | /// \param [in] IndexOfSizeArg Index of the argument that specifies the size |
495 | /// of the memory that needs to be allocated. E.g. for malloc, this would be |
496 | /// 0. |
  /// \param [in] RetVal Specifies the newly allocated pointer value;
  /// if unspecified, the return value of \p Call is used.
499 | [[nodiscard]] static ProgramStateRef |
500 | ProcessZeroAllocCheck(const CallEvent &Call, const unsigned IndexOfSizeArg, |
501 | ProgramStateRef State, |
502 | std::optional<SVal> RetVal = std::nullopt); |
503 | |
504 | /// Model functions with the ownership_returns attribute. |
505 | /// |
  /// User-defined functions may have the ownership_returns attribute, which
  /// annotates that the function returns an object that was allocated on the
  /// heap, and passes the ownership to the caller.
509 | /// |
510 | /// void __attribute((ownership_returns(malloc, 1))) *my_malloc(size_t); |
511 | /// |
512 | /// It has two parameters: |
513 | /// - first: name of the resource (e.g. 'malloc') |
514 | /// - (OPTIONAL) second: size of the allocated region |
515 | /// |
516 | /// \param [in] Call The expression that allocates memory. |
517 | /// \param [in] Att The ownership_returns attribute. |
518 | /// \param [in] State The \c ProgramState right before allocation. |
519 | /// \returns The ProgramState right after allocation. |
520 | [[nodiscard]] ProgramStateRef |
521 | MallocMemReturnsAttr(CheckerContext &C, const CallEvent &Call, |
522 | const OwnershipAttr *Att, ProgramStateRef State) const; |
523 | |
524 | /// Models memory allocation. |
525 | /// |
526 | /// \param [in] Call The expression that allocates memory. |
527 | /// \param [in] SizeEx Size of the memory that needs to be allocated. |
  /// \param [in] Init The value the allocated memory needs to be initialized
  /// with. For example, \c calloc initializes the allocated memory to 0,
  /// while malloc leaves it undefined.
531 | /// \param [in] State The \c ProgramState right before allocation. |
532 | /// \returns The ProgramState right after allocation. |
533 | [[nodiscard]] ProgramStateRef |
534 | MallocMemAux(CheckerContext &C, const CallEvent &Call, const Expr *SizeEx, |
535 | SVal Init, ProgramStateRef State, AllocationFamily Family) const; |
536 | |
537 | /// Models memory allocation. |
538 | /// |
539 | /// \param [in] Call The expression that allocates memory. |
540 | /// \param [in] Size Size of the memory that needs to be allocated. |
  /// \param [in] Init The value the allocated memory needs to be initialized
  /// with. For example, \c calloc initializes the allocated memory to 0,
  /// while malloc leaves it undefined.
544 | /// \param [in] State The \c ProgramState right before allocation. |
545 | /// \returns The ProgramState right after allocation. |
546 | [[nodiscard]] ProgramStateRef MallocMemAux(CheckerContext &C, |
547 | const CallEvent &Call, SVal Size, |
548 | SVal Init, ProgramStateRef State, |
549 | AllocationFamily Family) const; |
550 | |
  // Check whether this malloc() call uses special flags. At present that means
  // M_ZERO or __GFP_ZERO (in which case, treat it like calloc).
553 | [[nodiscard]] std::optional<ProgramStateRef> |
554 | performKernelMalloc(const CallEvent &Call, CheckerContext &C, |
555 | const ProgramStateRef &State) const; |
556 | |
557 | /// Model functions with the ownership_takes and ownership_holds attributes. |
558 | /// |
  /// User-defined functions may have the ownership_takes and/or
  /// ownership_holds attributes, which annotate that the function frees the
  /// memory passed as a parameter.
562 | /// |
563 | /// void __attribute((ownership_takes(malloc, 1))) my_free(void *); |
564 | /// void __attribute((ownership_holds(malloc, 1))) my_hold(void *); |
565 | /// |
566 | /// They have two parameters: |
567 | /// - first: name of the resource (e.g. 'malloc') |
568 | /// - second: index of the parameter the attribute applies to |
569 | /// |
570 | /// \param [in] Call The expression that frees memory. |
571 | /// \param [in] Att The ownership_takes or ownership_holds attribute. |
572 | /// \param [in] State The \c ProgramState right before allocation. |
573 | /// \returns The ProgramState right after deallocation. |
574 | [[nodiscard]] ProgramStateRef FreeMemAttr(CheckerContext &C, |
575 | const CallEvent &Call, |
576 | const OwnershipAttr *Att, |
577 | ProgramStateRef State) const; |
578 | |
579 | /// Models memory deallocation. |
580 | /// |
581 | /// \param [in] Call The expression that frees memory. |
582 | /// \param [in] State The \c ProgramState right before allocation. |
583 | /// \param [in] Num Index of the argument that needs to be freed. This is |
584 | /// normally 0, but for custom free functions it may be different. |
  /// \param [in] Hold Whether the parameter at index \p Num has the
  /// ownership_holds attribute.
587 | /// \param [out] IsKnownToBeAllocated Whether the memory to be freed is known |
588 | /// to have been allocated, or in other words, the symbol to be freed was |
589 | /// registered as allocated by this checker. In the following case, \c ptr |
590 | /// isn't known to be allocated. |
591 | /// void Haha(int *ptr) { |
592 | /// ptr = realloc(ptr, 67); |
593 | /// // ... |
594 | /// } |
595 | /// \param [in] ReturnsNullOnFailure Whether the memory deallocation function |
596 | /// we're modeling returns with Null on failure. |
597 | /// \returns The ProgramState right after deallocation. |
598 | [[nodiscard]] ProgramStateRef |
599 | FreeMemAux(CheckerContext &C, const CallEvent &Call, ProgramStateRef State, |
600 | unsigned Num, bool Hold, bool &IsKnownToBeAllocated, |
601 | AllocationFamily Family, bool ReturnsNullOnFailure = false) const; |
602 | |
603 | /// Models memory deallocation. |
604 | /// |
  /// \param [in] ArgExpr The variable whose pointee needs to be freed.
606 | /// \param [in] Call The expression that frees the memory. |
  /// \param [in] State The \c ProgramState right before allocation.
  /// \param [in] Hold Whether the parameter that receives the freed pointer
  /// has the ownership_holds attribute.
611 | /// \param [out] IsKnownToBeAllocated Whether the memory to be freed is known |
612 | /// to have been allocated, or in other words, the symbol to be freed was |
613 | /// registered as allocated by this checker. In the following case, \c ptr |
614 | /// isn't known to be allocated. |
615 | /// void Haha(int *ptr) { |
616 | /// ptr = realloc(ptr, 67); |
617 | /// // ... |
618 | /// } |
619 | /// \param [in] ReturnsNullOnFailure Whether the memory deallocation function |
620 | /// we're modeling returns with Null on failure. |
621 | /// \param [in] ArgValOpt Optional value to use for the argument instead of |
622 | /// the one obtained from ArgExpr. |
623 | /// \returns The ProgramState right after deallocation. |
624 | [[nodiscard]] ProgramStateRef |
625 | FreeMemAux(CheckerContext &C, const Expr *ArgExpr, const CallEvent &Call, |
626 | ProgramStateRef State, bool Hold, bool &IsKnownToBeAllocated, |
627 | AllocationFamily Family, bool ReturnsNullOnFailure = false, |
628 | std::optional<SVal> ArgValOpt = {}) const; |
629 | |
630 | // TODO: Needs some refactoring, as all other deallocation modeling |
631 | // functions are suffering from out parameters and messy code due to how |
632 | // realloc is handled. |
633 | // |
634 | /// Models memory reallocation. |
635 | /// |
  /// \param [in] Call The expression that reallocates memory.
  /// \param [in] ShouldFreeOnFail Whether the supplied memory should be freed
  /// if the reallocation fails.
639 | /// \param [in] State The \c ProgramState right before reallocation. |
640 | /// \param [in] SuffixWithN Whether the reallocation function we're modeling |
641 | /// has an '_n' suffix, such as g_realloc_n. |
642 | /// \returns The ProgramState right after reallocation. |
643 | [[nodiscard]] ProgramStateRef |
644 | ReallocMemAux(CheckerContext &C, const CallEvent &Call, bool ShouldFreeOnFail, |
645 | ProgramStateRef State, AllocationFamily Family, |
646 | bool SuffixWithN = false) const; |
647 | |
648 | /// Evaluates the buffer size that needs to be allocated. |
649 | /// |
  /// \param [in] Blocks The number of blocks that need to be allocated.
651 | /// \param [in] BlockBytes The size of a block. |
652 | /// \returns The symbolic value of \p Blocks * \p BlockBytes. |
653 | [[nodiscard]] static SVal evalMulForBufferSize(CheckerContext &C, |
654 | const Expr *Blocks, |
655 | const Expr *BlockBytes); |
656 | |
  /// Models zero-initialized array allocation.
  ///
  /// \param [in] Call The expression that allocates memory.
  /// \param [in] State The \c ProgramState right before allocation.
661 | /// \returns The ProgramState right after allocation. |
662 | [[nodiscard]] ProgramStateRef CallocMem(CheckerContext &C, |
663 | const CallEvent &Call, |
664 | ProgramStateRef State) const; |
665 | |
666 | /// See if deallocation happens in a suspicious context. If so, escape the |
667 | /// pointers that otherwise would have been deallocated and return true. |
668 | bool suppressDeallocationsInSuspiciousContexts(const CallEvent &Call, |
669 | CheckerContext &C) const; |
670 | |
671 | /// If in \p S \p Sym is used, check whether \p Sym was already freed. |
672 | bool checkUseAfterFree(SymbolRef Sym, CheckerContext &C, const Stmt *S) const; |
673 | |
674 | /// If in \p S \p Sym is used, check whether \p Sym was allocated as a zero |
675 | /// sized memory region. |
676 | void checkUseZeroAllocated(SymbolRef Sym, CheckerContext &C, |
677 | const Stmt *S) const; |
678 | |
679 | /// If in \p S \p Sym is being freed, check whether \p Sym was already freed. |
680 | bool checkDoubleDelete(SymbolRef Sym, CheckerContext &C) const; |
681 | |
682 | /// Check if the function is known to free memory, or if it is |
683 | /// "interesting" and should be modeled explicitly. |
684 | /// |
685 | /// \param [out] EscapingSymbol A function might not free memory in general, |
686 | /// but could be known to free a particular symbol. In this case, false is |
687 | /// returned and the single escaping symbol is returned through the out |
688 | /// parameter. |
689 | /// |
690 | /// We assume that pointers do not escape through calls to system functions |
691 | /// not handled by this checker. |
692 | bool mayFreeAnyEscapedMemoryOrIsModeledExplicitly(const CallEvent *Call, |
693 | ProgramStateRef State, |
694 | SymbolRef &EscapingSymbol) const; |
695 | |
696 | /// Implementation of the checkPointerEscape callbacks. |
697 | [[nodiscard]] ProgramStateRef |
698 | checkPointerEscapeAux(ProgramStateRef State, |
699 | const InvalidatedSymbols &Escaped, |
700 | const CallEvent *Call, PointerEscapeKind Kind, |
701 | bool IsConstPointerEscape) const; |
702 | |
703 | // Implementation of the checkPreStmt and checkEndFunction callbacks. |
704 | void checkEscapeOnReturn(const ReturnStmt *S, CheckerContext &C) const; |
705 | |
706 | ///@{ |
707 | /// Tells if a given family/call/symbol is tracked by the current checker. |
708 | /// Sets CheckKind to the kind of the checker responsible for this |
709 | /// family/call/symbol. |
710 | std::optional<CheckKind> getCheckIfTracked(AllocationFamily Family, |
711 | bool IsALeakCheck = false) const; |
712 | |
713 | std::optional<CheckKind> getCheckIfTracked(CheckerContext &C, SymbolRef Sym, |
714 | bool IsALeakCheck = false) const; |
715 | ///@} |
716 | static bool SummarizeValue(raw_ostream &os, SVal V); |
717 | static bool SummarizeRegion(raw_ostream &os, const MemRegion *MR); |
718 | |
719 | void HandleNonHeapDealloc(CheckerContext &C, SVal ArgVal, SourceRange Range, |
720 | const Expr *DeallocExpr, |
721 | AllocationFamily Family) const; |
722 | |
723 | void HandleFreeAlloca(CheckerContext &C, SVal ArgVal, |
724 | SourceRange Range) const; |
725 | |
726 | void HandleMismatchedDealloc(CheckerContext &C, SourceRange Range, |
727 | const Expr *DeallocExpr, const RefState *RS, |
728 | SymbolRef Sym, bool OwnershipTransferred) const; |
729 | |
730 | void HandleOffsetFree(CheckerContext &C, SVal ArgVal, SourceRange Range, |
731 | const Expr *DeallocExpr, AllocationFamily Family, |
732 | const Expr *AllocExpr = nullptr) const; |
733 | |
734 | void HandleUseAfterFree(CheckerContext &C, SourceRange Range, |
735 | SymbolRef Sym) const; |
736 | |
737 | void HandleDoubleFree(CheckerContext &C, SourceRange Range, bool Released, |
738 | SymbolRef Sym, SymbolRef PrevSym) const; |
739 | |
740 | void HandleDoubleDelete(CheckerContext &C, SymbolRef Sym) const; |
741 | |
742 | void HandleUseZeroAlloc(CheckerContext &C, SourceRange Range, |
743 | SymbolRef Sym) const; |
744 | |
745 | void HandleFunctionPtrFree(CheckerContext &C, SVal ArgVal, SourceRange Range, |
746 | const Expr *FreeExpr, |
747 | AllocationFamily Family) const; |
748 | |
749 | /// Find the location of the allocation for Sym on the path leading to the |
750 | /// exploded node N. |
751 | static LeakInfo getAllocationSite(const ExplodedNode *N, SymbolRef Sym, |
752 | CheckerContext &C); |
753 | |
754 | void HandleLeak(SymbolRef Sym, ExplodedNode *N, CheckerContext &C) const; |
755 | |
  /// Test if the value in ArgVal equals the value of the macro `ZERO_SIZE_PTR`.
757 | bool isArgZERO_SIZE_PTR(ProgramStateRef State, CheckerContext &C, |
758 | SVal ArgVal) const; |
759 | }; |
760 | } // end anonymous namespace |
761 | |
762 | //===----------------------------------------------------------------------===// |
763 | // Definition of NoOwnershipChangeVisitor. |
764 | //===----------------------------------------------------------------------===// |
765 | |
766 | namespace { |
767 | class NoMemOwnershipChangeVisitor final : public NoOwnershipChangeVisitor { |
768 | protected: |
769 | /// Syntactically checks whether the callee is a deallocating function. Since |
770 | /// we have no path-sensitive information on this call (we would need a |
  /// CallEvent instead of a CallExpr for that), it's possible that a
772 | /// deallocation function was called indirectly through a function pointer, |
773 | /// but we are not able to tell, so this is a best effort analysis. |
774 | /// See namespace `memory_passed_to_fn_call_free_through_fn_ptr` in |
775 | /// clang/test/Analysis/NewDeleteLeaks.cpp. |
776 | bool isFreeingCallAsWritten(const CallExpr &Call) const { |
777 | const auto *MallocChk = static_cast<const MallocChecker *>(&Checker); |
778 | if (MallocChk->FreeingMemFnMap.lookupAsWritten(Call) || |
779 | MallocChk->ReallocatingMemFnMap.lookupAsWritten(Call)) |
780 | return true; |
781 | |
782 | if (const auto *Func = |
            llvm::dyn_cast_or_null<FunctionDecl>(Call.getCalleeDecl()))
784 | return MallocChecker::isFreeingOwnershipAttrCall(Func); |
785 | |
786 | return false; |
787 | } |
788 | |
789 | bool hasResourceStateChanged(ProgramStateRef CallEnterState, |
790 | ProgramStateRef CallExitEndState) final { |
    return CallEnterState->get<RegionState>(Sym) !=
           CallExitEndState->get<RegionState>(Sym);
793 | } |
794 | |
795 | /// Heuristically guess whether the callee intended to free memory. This is |
796 | /// done syntactically, because we are trying to argue about alternative |
797 | /// paths of execution, and as a consequence we don't have path-sensitive |
798 | /// information. |
799 | bool doesFnIntendToHandleOwnership(const Decl *Callee, |
800 | ASTContext &ACtx) final { |
801 | using namespace clang::ast_matchers; |
    const FunctionDecl *FD = dyn_cast<FunctionDecl>(Callee);
803 | |
    auto Matches = match(findAll(stmt(anyOf(cxxDeleteExpr().bind("delete"),
                                            callExpr().bind("call")))),
                         *FD->getBody(), ACtx);
807 | for (BoundNodes Match : Matches) { |
      if (Match.getNodeAs<CXXDeleteExpr>("delete"))
        return true;

      if (const auto *Call = Match.getNodeAs<CallExpr>("call"))
        if (isFreeingCallAsWritten(*Call))
813 | return true; |
814 | } |
815 | // TODO: Ownership might change with an attempt to store the allocated |
816 | // memory, not only through deallocation. Check for attempted stores as |
817 | // well. |
818 | return false; |
819 | } |
820 | |
821 | PathDiagnosticPieceRef emitNote(const ExplodedNode *N) final { |
    PathDiagnosticLocation L = PathDiagnosticLocation::create(
        N->getLocation(),
        N->getState()->getStateManager().getContext().getSourceManager());
    return std::make_shared<PathDiagnosticEventPiece>(
        L, "Returning without deallocating memory or storing the pointer for "
           "later deallocation");
828 | } |
829 | |
830 | public: |
831 | NoMemOwnershipChangeVisitor(SymbolRef Sym, const MallocChecker *Checker) |
832 | : NoOwnershipChangeVisitor(Sym, Checker) {} |
833 | |
834 | void Profile(llvm::FoldingSetNodeID &ID) const override { |
835 | static int Tag = 0; |
    ID.AddPointer(&Tag);
    ID.AddPointer(Sym);
838 | } |
839 | }; |
840 | |
841 | } // end anonymous namespace |
842 | |
843 | //===----------------------------------------------------------------------===// |
844 | // Definition of MallocBugVisitor. |
845 | //===----------------------------------------------------------------------===// |
846 | |
847 | namespace { |
848 | /// The bug visitor which allows us to print extra diagnostics along the |
849 | /// BugReport path. For example, showing the allocation site of the leaked |
850 | /// region. |
851 | class MallocBugVisitor final : public BugReporterVisitor { |
852 | protected: |
853 | enum NotificationMode { Normal, ReallocationFailed }; |
854 | |
855 | // The allocated region symbol tracked by the main analysis. |
856 | SymbolRef Sym; |
857 | |
858 | // The mode we are in, i.e. what kind of diagnostics will be emitted. |
859 | NotificationMode Mode; |
860 | |
861 | // A symbol from when the primary region should have been reallocated. |
862 | SymbolRef FailedReallocSymbol; |
863 | |
864 | // A C++ destructor stack frame in which memory was released. Used for |
865 | // miscellaneous false positive suppression. |
866 | const StackFrameContext *ReleaseDestructorLC; |
867 | |
868 | bool IsLeak; |
869 | |
870 | public: |
871 | MallocBugVisitor(SymbolRef S, bool isLeak = false) |
872 | : Sym(S), Mode(Normal), FailedReallocSymbol(nullptr), |
873 | ReleaseDestructorLC(nullptr), IsLeak(isLeak) {} |
874 | |
875 | static void *getTag() { |
876 | static int Tag = 0; |
877 | return &Tag; |
878 | } |
879 | |
880 | void Profile(llvm::FoldingSetNodeID &ID) const override { |
    ID.AddPointer(getTag());
    ID.AddPointer(Sym);
883 | } |
884 | |
885 | /// Did not track -> allocated. Other state (released) -> allocated. |
886 | static inline bool isAllocated(const RefState *RSCurr, const RefState *RSPrev, |
887 | const Stmt *Stmt) { |
    return (isa_and_nonnull<CallExpr, CXXNewExpr>(Stmt) &&
889 | (RSCurr && |
890 | (RSCurr->isAllocated() || RSCurr->isAllocatedOfSizeZero())) && |
891 | (!RSPrev || |
892 | !(RSPrev->isAllocated() || RSPrev->isAllocatedOfSizeZero()))); |
893 | } |
894 | |
895 | /// Did not track -> released. Other state (allocated) -> released. |
896 | /// The statement associated with the release might be missing. |
897 | static inline bool isReleased(const RefState *RSCurr, const RefState *RSPrev, |
898 | const Stmt *Stmt) { |
899 | bool IsReleased = |
900 | (RSCurr && RSCurr->isReleased()) && (!RSPrev || !RSPrev->isReleased()); |
901 | assert(!IsReleased || (isa_and_nonnull<CallExpr, CXXDeleteExpr>(Stmt)) || |
902 | (!Stmt && RSCurr->getAllocationFamily() == AF_InnerBuffer)); |
903 | return IsReleased; |
904 | } |
905 | |
906 | /// Did not track -> relinquished. Other state (allocated) -> relinquished. |
907 | static inline bool isRelinquished(const RefState *RSCurr, |
908 | const RefState *RSPrev, const Stmt *Stmt) { |
909 | return ( |
        isa_and_nonnull<CallExpr, ObjCMessageExpr, ObjCPropertyRefExpr>(Stmt) &&
911 | (RSCurr && RSCurr->isRelinquished()) && |
912 | (!RSPrev || !RSPrev->isRelinquished())); |
913 | } |
914 | |
915 | /// If the expression is not a call, and the state change is |
916 | /// released -> allocated, it must be the realloc return value |
917 | /// check. If we have to handle more cases here, it might be cleaner just |
918 | /// to track this extra bit in the state itself. |
919 | static inline bool hasReallocFailed(const RefState *RSCurr, |
920 | const RefState *RSPrev, |
921 | const Stmt *Stmt) { |
    return ((!isa_and_nonnull<CallExpr>(Stmt)) &&
923 | (RSCurr && |
924 | (RSCurr->isAllocated() || RSCurr->isAllocatedOfSizeZero())) && |
925 | (RSPrev && |
926 | !(RSPrev->isAllocated() || RSPrev->isAllocatedOfSizeZero()))); |
927 | } |
928 | |
929 | PathDiagnosticPieceRef VisitNode(const ExplodedNode *N, |
930 | BugReporterContext &BRC, |
931 | PathSensitiveBugReport &BR) override; |
932 | |
933 | PathDiagnosticPieceRef getEndPath(BugReporterContext &BRC, |
934 | const ExplodedNode *EndPathNode, |
935 | PathSensitiveBugReport &BR) override { |
936 | if (!IsLeak) |
937 | return nullptr; |
938 | |
939 | PathDiagnosticLocation L = BR.getLocation(); |
940 | // Do not add the statement itself as a range in case of leak. |
    return std::make_shared<PathDiagnosticEventPiece>(L, BR.getDescription(),
                                                      false);
943 | } |
944 | |
945 | private: |
946 | class StackHintGeneratorForReallocationFailed |
947 | : public StackHintGeneratorForSymbol { |
948 | public: |
949 | StackHintGeneratorForReallocationFailed(SymbolRef S, StringRef M) |
950 | : StackHintGeneratorForSymbol(S, M) {} |
951 | |
952 | std::string getMessageForArg(const Expr *ArgE, unsigned ArgIndex) override { |
953 | // Printed parameters start at 1, not 0. |
954 | ++ArgIndex; |
955 | |
956 | SmallString<200> buf; |
957 | llvm::raw_svector_ostream os(buf); |
958 | |
      os << "Reallocation of " << ArgIndex << llvm::getOrdinalSuffix(ArgIndex)
         << " parameter failed";
961 | |
962 | return std::string(os.str()); |
963 | } |
964 | |
965 | std::string getMessageForReturn(const CallExpr *CallExpr) override { |
      return "Reallocation of returned value failed";
967 | } |
968 | }; |
969 | }; |
970 | } // end anonymous namespace |
971 | |
972 | // A map from the freed symbol to the symbol representing the return value of |
973 | // the free function. |
974 | REGISTER_MAP_WITH_PROGRAMSTATE(FreeReturnValue, SymbolRef, SymbolRef) |
975 | |
976 | namespace { |
977 | class StopTrackingCallback final : public SymbolVisitor { |
978 | ProgramStateRef state; |
979 | |
980 | public: |
981 | StopTrackingCallback(ProgramStateRef st) : state(std::move(st)) {} |
982 | ProgramStateRef getState() const { return state; } |
983 | |
984 | bool VisitSymbol(SymbolRef sym) override { |
    state = state->remove<RegionState>(sym);
986 | return true; |
987 | } |
988 | }; |
989 | } // end anonymous namespace |
990 | |
991 | static bool isStandardNewDelete(const FunctionDecl *FD) { |
992 | if (!FD) |
993 | return false; |
994 | |
995 | OverloadedOperatorKind Kind = FD->getOverloadedOperator(); |
996 | if (Kind != OO_New && Kind != OO_Array_New && Kind != OO_Delete && |
997 | Kind != OO_Array_Delete) |
998 | return false; |
999 | |
1000 | // This is standard if and only if it's not defined in a user file. |
1001 | SourceLocation L = FD->getLocation(); |
1002 | // If the header for operator delete is not included, it's still defined |
1003 | // in an invalid source location. Check to make sure we don't crash. |
1004 | return !L.isValid() || |
         FD->getASTContext().getSourceManager().isInSystemHeader(L);
1006 | } |
1007 | |
1008 | //===----------------------------------------------------------------------===// |
1009 | // Methods of MallocChecker and MallocBugVisitor. |
1010 | //===----------------------------------------------------------------------===// |
1011 | |
1012 | bool MallocChecker::isFreeingOwnershipAttrCall(const FunctionDecl *Func) { |
1013 | if (Func->hasAttrs()) { |
1014 | for (const auto *I : Func->specific_attrs<OwnershipAttr>()) { |
1015 | OwnershipAttr::OwnershipKind OwnKind = I->getOwnKind(); |
1016 | if (OwnKind == OwnershipAttr::Takes || OwnKind == OwnershipAttr::Holds) |
1017 | return true; |
1018 | } |
1019 | } |
1020 | return false; |
1021 | } |
1022 | |
1023 | bool MallocChecker::isFreeingCall(const CallEvent &Call) const { |
1024 | if (FreeingMemFnMap.lookup(Call) || ReallocatingMemFnMap.lookup(Call)) |
1025 | return true; |
1026 | |
  if (const auto *Func = dyn_cast_or_null<FunctionDecl>(Call.getDecl()))
1028 | return isFreeingOwnershipAttrCall(Func); |
1029 | |
1030 | return false; |
1031 | } |
1032 | |
1033 | bool MallocChecker::isMemCall(const CallEvent &Call) const { |
1034 | if (FreeingMemFnMap.lookup(Call) || AllocatingMemFnMap.lookup(Call) || |
1035 | ReallocatingMemFnMap.lookup(Call)) |
1036 | return true; |
1037 | |
1038 | if (!ShouldIncludeOwnershipAnnotatedFunctions) |
1039 | return false; |
1040 | |
  const auto *Func = dyn_cast<FunctionDecl>(Call.getDecl());
1042 | return Func && Func->hasAttr<OwnershipAttr>(); |
1043 | } |
1044 | |
1045 | std::optional<ProgramStateRef> |
1046 | MallocChecker::performKernelMalloc(const CallEvent &Call, CheckerContext &C, |
1047 | const ProgramStateRef &State) const { |
1048 | // 3-argument malloc(), as commonly used in {Free,Net,Open}BSD Kernels: |
1049 | // |
1050 | // void *malloc(unsigned long size, struct malloc_type *mtp, int flags); |
1051 | // |
1052 | // One of the possible flags is M_ZERO, which means 'give me back an |
1053 | // allocation which is already zeroed', like calloc. |
1054 | |
1055 | // 2-argument kmalloc(), as used in the Linux kernel: |
1056 | // |
1057 | // void *kmalloc(size_t size, gfp_t flags); |
1058 | // |
1059 | // Has the similar flag value __GFP_ZERO. |
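  // For example (assuming Linux kernel headers), a call like
  //   p = kmalloc(len, GFP_KERNEL | __GFP_ZERO);
  // is treated below like calloc, i.e. the returned memory is assumed to be
  // zero-initialized.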
1060 | |
1061 | // This logic is largely cloned from O_CREAT in UnixAPIChecker, maybe some |
1062 | // code could be shared. |
1063 | |
1064 | ASTContext &Ctx = C.getASTContext(); |
1065 | llvm::Triple::OSType OS = Ctx.getTargetInfo().getTriple().getOS(); |
1066 | |
1067 | if (!KernelZeroFlagVal) { |
1068 | switch (OS) { |
1069 | case llvm::Triple::FreeBSD: |
1070 | KernelZeroFlagVal = 0x0100; |
1071 | break; |
1072 | case llvm::Triple::NetBSD: |
1073 | KernelZeroFlagVal = 0x0002; |
1074 | break; |
1075 | case llvm::Triple::OpenBSD: |
1076 | KernelZeroFlagVal = 0x0008; |
1077 | break; |
1078 | case llvm::Triple::Linux: |
1079 | // __GFP_ZERO |
1080 | KernelZeroFlagVal = 0x8000; |
1081 | break; |
1082 | default: |
1083 | // FIXME: We need a more general way of getting the M_ZERO value. |
1084 | // See also: O_CREAT in UnixAPIChecker.cpp. |
1085 | |
1086 | // Fall back to normal malloc behavior on platforms where we don't |
1087 | // know M_ZERO. |
1088 | return std::nullopt; |
1089 | } |
1090 | } |
1091 | |
  // We treat the last argument as the flags argument, and callers fall back to
  // normal malloc when this function returns std::nullopt. This works for the
  // FreeBSD kernel malloc as well as Linux kmalloc.
1095 | if (Call.getNumArgs() < 2) |
1096 | return std::nullopt; |
1097 | |
  const Expr *FlagsEx = Call.getArgExpr(Call.getNumArgs() - 1);
  const SVal V = C.getSVal(FlagsEx);
  if (!isa<NonLoc>(V)) {
1101 | // The case where 'V' can be a location can only be due to a bad header, |
1102 | // so in this case bail out. |
1103 | return std::nullopt; |
1104 | } |
1105 | |
1106 | NonLoc Flags = V.castAs<NonLoc>(); |
  NonLoc ZeroFlag = C.getSValBuilder()
                        .makeIntVal(*KernelZeroFlagVal, FlagsEx->getType())
                        .castAs<NonLoc>();
  SVal MaskedFlagsUC = C.getSValBuilder().evalBinOpNN(
      State, BO_And, Flags, ZeroFlag, FlagsEx->getType());
1113 | if (MaskedFlagsUC.isUnknownOrUndef()) |
1114 | return std::nullopt; |
1115 | DefinedSVal MaskedFlags = MaskedFlagsUC.castAs<DefinedSVal>(); |
1116 | |
1117 | // Check if maskedFlags is non-zero. |
1118 | ProgramStateRef TrueState, FalseState; |
  std::tie(TrueState, FalseState) = State->assume(MaskedFlags);
1120 | |
1121 | // If M_ZERO is set, treat this like calloc (initialized). |
1122 | if (TrueState && !FalseState) { |
    SVal ZeroVal = C.getSValBuilder().makeZeroVal(Ctx.CharTy);
    return MallocMemAux(C, Call, Call.getArgExpr(0), ZeroVal, TrueState,
                        AF_Malloc);
1126 | } |
1127 | |
1128 | return std::nullopt; |
1129 | } |
1130 | |
1131 | SVal MallocChecker::evalMulForBufferSize(CheckerContext &C, const Expr *Blocks, |
1132 | const Expr *BlockBytes) { |
1133 | SValBuilder &SB = C.getSValBuilder(); |
  SVal BlocksVal = C.getSVal(Blocks);
  SVal BlockBytesVal = C.getSVal(BlockBytes);
  ProgramStateRef State = C.getState();
  SVal TotalSize = SB.evalBinOp(State, BO_Mul, BlocksVal, BlockBytesVal,
                                SB.getContext().getSizeType());
1139 | return TotalSize; |
1140 | } |
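// A sketch of the intended use, for an allocator that takes (count, size)
// arguments (the argument indices here are illustrative):
//   SVal Total = evalMulForBufferSize(C, Call.getArgExpr(0),
//                                     Call.getArgExpr(1));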
1141 | |
1142 | void MallocChecker::checkBasicAlloc(const CallEvent &Call, |
1143 | CheckerContext &C) const { |
1144 | ProgramStateRef State = C.getState(); |
  State = MallocMemAux(C, Call, Call.getArgExpr(0), UndefinedVal(), State,
                       AF_Malloc);
  State = ProcessZeroAllocCheck(Call, 0, State);
1148 | C.addTransition(State); |
1149 | } |
1150 | |
1151 | void MallocChecker::checkKernelMalloc(const CallEvent &Call, |
1152 | CheckerContext &C) const { |
1153 | ProgramStateRef State = C.getState(); |
1154 | std::optional<ProgramStateRef> MaybeState = |
1155 | performKernelMalloc(Call, C, State); |
1156 | if (MaybeState) |
1157 | State = *MaybeState; |
1158 | else |
    State = MallocMemAux(C, Call, Call.getArgExpr(0), UndefinedVal(), State,
                         AF_Malloc);
1161 | C.addTransition(State); |
1162 | } |
1163 | |
1164 | static bool isStandardRealloc(const CallEvent &Call) { |
  const FunctionDecl *FD = dyn_cast<FunctionDecl>(Call.getDecl());
  assert(FD);
  ASTContext &AC = FD->getASTContext();

  return FD->getDeclaredReturnType().getDesugaredType(AC) == AC.VoidPtrTy &&
         FD->getParamDecl(0)->getType().getDesugaredType(AC) == AC.VoidPtrTy &&
         FD->getParamDecl(1)->getType().getDesugaredType(AC) ==
             AC.getSizeType();
1173 | } |
1174 | |
1175 | static bool isGRealloc(const CallEvent &Call) { |
  const FunctionDecl *FD = dyn_cast<FunctionDecl>(Call.getDecl());
  assert(FD);
  ASTContext &AC = FD->getASTContext();

  return FD->getDeclaredReturnType().getDesugaredType(AC) == AC.VoidPtrTy &&
         FD->getParamDecl(0)->getType().getDesugaredType(AC) == AC.VoidPtrTy &&
         FD->getParamDecl(1)->getType().getDesugaredType(AC) ==
             AC.UnsignedLongTy;
1184 | } |
1185 | |
1186 | void MallocChecker::checkRealloc(const CallEvent &Call, CheckerContext &C, |
1187 | bool ShouldFreeOnFail) const { |
1188 | // Ignore calls to functions whose type does not match the expected type of |
1189 | // either the standard realloc or g_realloc from GLib. |
1190 | // FIXME: Should we perform this kind of checking consistently for each |
1191 | // function? If yes, then perhaps extend the `CallDescription` interface to |
1192 | // handle this. |
1193 | if (!isStandardRealloc(Call) && !isGRealloc(Call)) |
1194 | return; |
1195 | |
1196 | ProgramStateRef State = C.getState(); |
  State = ReallocMemAux(C, Call, ShouldFreeOnFail, State, AF_Malloc);
  State = ProcessZeroAllocCheck(Call, 1, State);
1199 | C.addTransition(State); |
1200 | } |
1201 | |
1202 | void MallocChecker::checkCalloc(const CallEvent &Call, |
1203 | CheckerContext &C) const { |
1204 | ProgramStateRef State = C.getState(); |
1205 | State = CallocMem(C, Call, State); |
  State = ProcessZeroAllocCheck(Call, 0, State);
  State = ProcessZeroAllocCheck(Call, 1, State);
1208 | C.addTransition(State); |
1209 | } |
1210 | |
1211 | void MallocChecker::checkFree(const CallEvent &Call, CheckerContext &C) const { |
1212 | ProgramStateRef State = C.getState(); |
1213 | bool IsKnownToBeAllocatedMemory = false; |
1214 | if (suppressDeallocationsInSuspiciousContexts(Call, C)) |
1215 | return; |
  State = FreeMemAux(C, Call, State, 0, false, IsKnownToBeAllocatedMemory,
                     AF_Malloc);
1218 | C.addTransition(State); |
1219 | } |
1220 | |
1221 | void MallocChecker::checkAlloca(const CallEvent &Call, |
1222 | CheckerContext &C) const { |
1223 | ProgramStateRef State = C.getState(); |
  State = MallocMemAux(C, Call, Call.getArgExpr(0), UndefinedVal(), State,
                       AF_Alloca);
  State = ProcessZeroAllocCheck(Call, 0, State);
1227 | C.addTransition(State); |
1228 | } |
1229 | |
1230 | void MallocChecker::checkStrdup(const CallEvent &Call, |
1231 | CheckerContext &C) const { |
1232 | ProgramStateRef State = C.getState(); |
  const auto *CE = dyn_cast_or_null<CallExpr>(Call.getOriginExpr());
1234 | if (!CE) |
1235 | return; |
  State = MallocUpdateRefState(C, CE, State, AF_Malloc);
1237 | |
1238 | C.addTransition(State); |
1239 | } |
1240 | |
1241 | void MallocChecker::checkIfNameIndex(const CallEvent &Call, |
1242 | CheckerContext &C) const { |
1243 | ProgramStateRef State = C.getState(); |
  // Should we model this differently? We could allocate a fixed number of
  // elements, with the last element zeroed out to terminate the array.
1246 | State = |
1247 | MallocMemAux(C, Call, Size: UnknownVal(), Init: UnknownVal(), State, Family: AF_IfNameIndex); |
1248 | |
1249 | C.addTransition(State); |
1250 | } |
1251 | |
1252 | void MallocChecker::checkIfFreeNameIndex(const CallEvent &Call, |
1253 | CheckerContext &C) const { |
1254 | ProgramStateRef State = C.getState(); |
1255 | bool IsKnownToBeAllocatedMemory = false; |
1256 | State = FreeMemAux(C, Call, State, Num: 0, Hold: false, IsKnownToBeAllocated&: IsKnownToBeAllocatedMemory, |
1257 | Family: AF_IfNameIndex); |
1258 | C.addTransition(State); |
1259 | } |
1260 | |
1261 | void MallocChecker::checkCXXNewOrCXXDelete(const CallEvent &Call, |
1262 | CheckerContext &C) const { |
1263 | ProgramStateRef State = C.getState(); |
1264 | bool IsKnownToBeAllocatedMemory = false; |
1265 | const auto *CE = dyn_cast_or_null<CallExpr>(Val: Call.getOriginExpr()); |
1266 | if (!CE) |
1267 | return; |
1268 | |
1269 | assert(isStandardNewDelete(Call)); |
1270 | |
1271 | // Process direct calls to operator new/new[]/delete/delete[] functions |
1272 | // as distinct from new/new[]/delete/delete[] expressions that are |
1273 | // processed by the checkPostStmt callbacks for CXXNewExpr and |
1274 | // CXXDeleteExpr. |
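  // For example, this callback models direct calls such as
  //   void *Buf = ::operator new(20);
  //   ::operator delete(Buf);
  // as an allocation/deallocation pair.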
1275 | const FunctionDecl *FD = C.getCalleeDecl(CE); |
1276 | switch (FD->getOverloadedOperator()) { |
1277 | case OO_New: |
1278 | State = |
1279 | MallocMemAux(C, Call, SizeEx: CE->getArg(Arg: 0), Init: UndefinedVal(), State, Family: AF_CXXNew); |
1280 | State = ProcessZeroAllocCheck(Call, IndexOfSizeArg: 0, State); |
1281 | break; |
1282 | case OO_Array_New: |
1283 | State = MallocMemAux(C, Call, SizeEx: CE->getArg(Arg: 0), Init: UndefinedVal(), State, |
1284 | Family: AF_CXXNewArray); |
1285 | State = ProcessZeroAllocCheck(Call, IndexOfSizeArg: 0, State); |
1286 | break; |
1287 | case OO_Delete: |
1288 | State = FreeMemAux(C, Call, State, Num: 0, Hold: false, IsKnownToBeAllocated&: IsKnownToBeAllocatedMemory, |
1289 | Family: AF_CXXNew); |
1290 | break; |
1291 | case OO_Array_Delete: |
1292 | State = FreeMemAux(C, Call, State, Num: 0, Hold: false, IsKnownToBeAllocated&: IsKnownToBeAllocatedMemory, |
1293 | Family: AF_CXXNewArray); |
1294 | break; |
1295 | default: |
1296 | llvm_unreachable("not a new/delete operator" ); |
1297 | } |
1298 | |
1299 | C.addTransition(State); |
1300 | } |
1301 | |
1302 | void MallocChecker::checkGMalloc0(const CallEvent &Call, |
1303 | CheckerContext &C) const { |
1304 | ProgramStateRef State = C.getState(); |
1305 | SValBuilder &svalBuilder = C.getSValBuilder(); |
1306 | SVal zeroVal = svalBuilder.makeZeroVal(type: svalBuilder.getContext().CharTy); |
1307 | State = MallocMemAux(C, Call, SizeEx: Call.getArgExpr(Index: 0), Init: zeroVal, State, Family: AF_Malloc); |
1308 | State = ProcessZeroAllocCheck(Call, IndexOfSizeArg: 0, State); |
1309 | C.addTransition(State); |
1310 | } |
1311 | |
1312 | void MallocChecker::checkGMemdup(const CallEvent &Call, |
1313 | CheckerContext &C) const { |
1314 | ProgramStateRef State = C.getState(); |
1315 | State = |
1316 | MallocMemAux(C, Call, SizeEx: Call.getArgExpr(Index: 1), Init: UnknownVal(), State, Family: AF_Malloc); |
1317 | State = ProcessZeroAllocCheck(Call, IndexOfSizeArg: 1, State); |
1318 | C.addTransition(State); |
1319 | } |
1320 | |
1321 | void MallocChecker::checkGMallocN(const CallEvent &Call, |
1322 | CheckerContext &C) const { |
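  // g_malloc_n(n_blocks, n_block_bytes) allocates n_blocks * n_block_bytes
  // bytes, so the total size is modeled as the product of the two arguments.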
1323 | ProgramStateRef State = C.getState(); |
1324 | SVal Init = UndefinedVal(); |
1325 | SVal TotalSize = evalMulForBufferSize(C, Blocks: Call.getArgExpr(Index: 0), BlockBytes: Call.getArgExpr(Index: 1)); |
1326 | State = MallocMemAux(C, Call, Size: TotalSize, Init, State, Family: AF_Malloc); |
1327 | State = ProcessZeroAllocCheck(Call, IndexOfSizeArg: 0, State); |
1328 | State = ProcessZeroAllocCheck(Call, IndexOfSizeArg: 1, State); |
1329 | C.addTransition(State); |
1330 | } |
1331 | |
1332 | void MallocChecker::checkGMallocN0(const CallEvent &Call, |
1333 | CheckerContext &C) const { |
1334 | ProgramStateRef State = C.getState(); |
1335 | SValBuilder &SB = C.getSValBuilder(); |
1336 | SVal Init = SB.makeZeroVal(type: SB.getContext().CharTy); |
1337 | SVal TotalSize = evalMulForBufferSize(C, Blocks: Call.getArgExpr(Index: 0), BlockBytes: Call.getArgExpr(Index: 1)); |
1338 | State = MallocMemAux(C, Call, Size: TotalSize, Init, State, Family: AF_Malloc); |
1339 | State = ProcessZeroAllocCheck(Call, IndexOfSizeArg: 0, State); |
1340 | State = ProcessZeroAllocCheck(Call, IndexOfSizeArg: 1, State); |
1341 | C.addTransition(State); |
1342 | } |
1343 | |
1344 | static bool isFromStdNamespace(const CallEvent &Call) { |
1345 | const Decl *FD = Call.getDecl(); |
1346 | assert(FD && "a CallDescription cannot match a call without a Decl" ); |
1347 | return FD->isInStdNamespace(); |
1348 | } |
1349 | |
1350 | void MallocChecker::preGetdelim(const CallEvent &Call, |
1351 | CheckerContext &C) const { |
1352 | // Discard calls to the C++ standard library function std::getline(), which |
1353 | // is completely unrelated to the POSIX getline() that we're checking. |
1354 | if (isFromStdNamespace(Call)) |
1355 | return; |
1356 | |
1357 | ProgramStateRef State = C.getState(); |
1358 | const auto LinePtr = getPointeeVal(PtrSVal: Call.getArgSVal(Index: 0), State); |
1359 | if (!LinePtr) |
1360 | return; |
1361 | |
1362 | // FreeMemAux takes IsKnownToBeAllocated as an output parameter, and it will |
1363 | // be true after the call if the symbol was registered by this checker. |
1364 | // We do not need this value here, as FreeMemAux will take care |
1365 | // of reporting any violation of the preconditions. |
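  // getline()/getdelim() may reallocate (and thus free) the buffer passed in
  // through '*lineptr', so treat the old pointee as released before the call.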
1366 | bool IsKnownToBeAllocated = false; |
1367 | State = FreeMemAux(C, ArgExpr: Call.getArgExpr(Index: 0), Call, State, Hold: false, |
1368 | IsKnownToBeAllocated, Family: AF_Malloc, ReturnsNullOnFailure: false, ArgValOpt: LinePtr); |
1369 | if (State) |
1370 | C.addTransition(State); |
1371 | } |
1372 | |
1373 | void MallocChecker::checkGetdelim(const CallEvent &Call, |
1374 | CheckerContext &C) const { |
1375 | // Discard calls to the C++ standard library function std::getline(), which |
1376 | // is completely unrelated to the POSIX getline() that we're checking. |
1377 | if (isFromStdNamespace(Call)) |
1378 | return; |
1379 | |
1380 | ProgramStateRef State = C.getState(); |
1381 | // Handle the post-conditions of getline and getdelim: |
1382 | // Register the new conjured value as an allocated buffer. |
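  // For example:
  //   char *Line = NULL;
  //   size_t N = 0;
  //   ssize_t Len = getline(&Line, &N, Fp);
  // After the call, 'Line' owns a heap buffer that must be free()d by the
  // caller; register it below and record its extent from the value in '*N'.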
1383 | const CallExpr *CE = dyn_cast_or_null<CallExpr>(Val: Call.getOriginExpr()); |
1384 | if (!CE) |
1385 | return; |
1386 | |
1387 | SValBuilder &SVB = C.getSValBuilder(); |
1388 | |
1389 | const auto LinePtr = |
1390 | getPointeeVal(PtrSVal: Call.getArgSVal(Index: 0), State)->getAs<DefinedSVal>(); |
1391 | const auto Size = |
1392 | getPointeeVal(PtrSVal: Call.getArgSVal(Index: 1), State)->getAs<DefinedSVal>(); |
1393 | if (!LinePtr || !Size || !LinePtr->getAsRegion()) |
1394 | return; |
1395 | |
1396 | State = setDynamicExtent(State, MR: LinePtr->getAsRegion(), Extent: *Size, SVB); |
1397 | C.addTransition(State: MallocUpdateRefState(C, E: CE, State, Family: AF_Malloc, RetVal: *LinePtr)); |
1398 | } |
1399 | |
1400 | void MallocChecker::checkReallocN(const CallEvent &Call, |
1401 | CheckerContext &C) const { |
1402 | ProgramStateRef State = C.getState(); |
1403 | State = ReallocMemAux(C, Call, /*ShouldFreeOnFail=*/false, State, Family: AF_Malloc, |
1404 | /*SuffixWithN=*/true); |
1405 | State = ProcessZeroAllocCheck(Call, IndexOfSizeArg: 1, State); |
1406 | State = ProcessZeroAllocCheck(Call, IndexOfSizeArg: 2, State); |
1407 | C.addTransition(State); |
1408 | } |
1409 | |
1410 | void MallocChecker::checkOwnershipAttr(const CallEvent &Call, |
1411 | CheckerContext &C) const { |
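  // For example, user code like the following (illustrative) is modeled here
  // when the "Optimistic" option or the MismatchedDeallocatorChecker is on:
  //   void *my_alloc(size_t n) __attribute__((ownership_returns(malloc, 1)));
  //   void my_free(void *p)    __attribute__((ownership_takes(malloc, 1)));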
1412 | ProgramStateRef State = C.getState(); |
1413 | const auto *CE = dyn_cast_or_null<CallExpr>(Val: Call.getOriginExpr()); |
1414 | if (!CE) |
1415 | return; |
1416 | const FunctionDecl *FD = C.getCalleeDecl(CE); |
1417 | if (!FD) |
1418 | return; |
1419 | if (ShouldIncludeOwnershipAnnotatedFunctions || |
1420 | ChecksEnabled[CK_MismatchedDeallocatorChecker]) { |
1421 | // Check all the attributes, if there are any. |
1422 | // There can be multiple of these attributes. |
1423 | if (FD->hasAttrs()) |
1424 | for (const auto *I : FD->specific_attrs<OwnershipAttr>()) { |
1425 | switch (I->getOwnKind()) { |
1426 | case OwnershipAttr::Returns: |
1427 | State = MallocMemReturnsAttr(C, Call, Att: I, State); |
1428 | break; |
1429 | case OwnershipAttr::Takes: |
1430 | case OwnershipAttr::Holds: |
1431 | State = FreeMemAttr(C, Call, Att: I, State); |
1432 | break; |
1433 | } |
1434 | } |
1435 | } |
1436 | C.addTransition(State); |
1437 | } |
1438 | |
1439 | void MallocChecker::checkPostCall(const CallEvent &Call, |
1440 | CheckerContext &C) const { |
1441 | if (C.wasInlined) |
1442 | return; |
1443 | if (!Call.getOriginExpr()) |
1444 | return; |
1445 | |
1446 | ProgramStateRef State = C.getState(); |
1447 | |
1448 | if (const CheckFn *Callback = FreeingMemFnMap.lookup(Call)) { |
1449 | (*Callback)(this, Call, C); |
1450 | return; |
1451 | } |
1452 | |
1453 | if (const CheckFn *Callback = AllocatingMemFnMap.lookup(Call)) { |
1454 | (*Callback)(this, Call, C); |
1455 | return; |
1456 | } |
1457 | |
1458 | if (const CheckFn *Callback = ReallocatingMemFnMap.lookup(Call)) { |
1459 | (*Callback)(this, Call, C); |
1460 | return; |
1461 | } |
1462 | |
1463 | if (isStandardNewDelete(Call)) { |
1464 | checkCXXNewOrCXXDelete(Call, C); |
1465 | return; |
1466 | } |
1467 | |
1468 | checkOwnershipAttr(Call, C); |
1469 | } |
1470 | |
// Performs the check for zero-sized allocations.
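// For example, 'malloc(0)' may legally return a non-null pointer that must not
// be dereferenced, and the result of 'realloc(ptr, 0)' is handled like the
// result of 'free(ptr)'.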
1472 | ProgramStateRef MallocChecker::ProcessZeroAllocCheck( |
1473 | const CallEvent &Call, const unsigned IndexOfSizeArg, ProgramStateRef State, |
1474 | std::optional<SVal> RetVal) { |
1475 | if (!State) |
1476 | return nullptr; |
1477 | |
1478 | if (!RetVal) |
1479 | RetVal = Call.getReturnValue(); |
1480 | |
1481 | const Expr *Arg = nullptr; |
1482 | |
1483 | if (const CallExpr *CE = dyn_cast<CallExpr>(Val: Call.getOriginExpr())) { |
1484 | Arg = CE->getArg(Arg: IndexOfSizeArg); |
1485 | } else if (const CXXNewExpr *NE = |
1486 | dyn_cast<CXXNewExpr>(Val: Call.getOriginExpr())) { |
1487 | if (NE->isArray()) { |
1488 | Arg = *NE->getArraySize(); |
1489 | } else { |
1490 | return State; |
1491 | } |
1492 | } else |
1493 | llvm_unreachable("not a CallExpr or CXXNewExpr" ); |
1494 | |
1495 | assert(Arg); |
1496 | |
1497 | auto DefArgVal = |
1498 | State->getSVal(Ex: Arg, LCtx: Call.getLocationContext()).getAs<DefinedSVal>(); |
1499 | |
1500 | if (!DefArgVal) |
1501 | return State; |
1502 | |
1503 | // Check if the allocation size is 0. |
1504 | ProgramStateRef TrueState, FalseState; |
1505 | SValBuilder &SvalBuilder = State->getStateManager().getSValBuilder(); |
1506 | DefinedSVal Zero = |
1507 | SvalBuilder.makeZeroVal(type: Arg->getType()).castAs<DefinedSVal>(); |
1508 | |
1509 | std::tie(args&: TrueState, args&: FalseState) = |
1510 | State->assume(Cond: SvalBuilder.evalEQ(state: State, lhs: *DefArgVal, rhs: Zero)); |
1511 | |
1512 | if (TrueState && !FalseState) { |
1513 | SymbolRef Sym = RetVal->getAsLocSymbol(); |
1514 | if (!Sym) |
1515 | return State; |
1516 | |
1517 | const RefState *RS = State->get<RegionState>(key: Sym); |
1518 | if (RS) { |
1519 | if (RS->isAllocated()) |
1520 | return TrueState->set<RegionState>(K: Sym, |
1521 | E: RefState::getAllocatedOfSizeZero(RS)); |
1522 | else |
1523 | return State; |
1524 | } else { |
1525 | // Case of zero-size realloc. Historically 'realloc(ptr, 0)' is treated as |
1526 | // 'free(ptr)' and the returned value from 'realloc(ptr, 0)' is not |
1527 | // tracked. Add zero-reallocated Sym to the state to catch references |
1528 | // to zero-allocated memory. |
1529 | return TrueState->add<ReallocSizeZeroSymbols>(K: Sym); |
1530 | } |
1531 | } |
1532 | |
1533 | // Assume the value is non-zero going forward. |
1534 | assert(FalseState); |
1535 | return FalseState; |
1536 | } |
1537 | |
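/// Strips all levels of pointer/reference indirection from \p T,
/// e.g. 'T***' becomes 'T'.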
1538 | static QualType getDeepPointeeType(QualType T) { |
1539 | QualType Result = T, PointeeType = T->getPointeeType(); |
1540 | while (!PointeeType.isNull()) { |
1541 | Result = PointeeType; |
1542 | PointeeType = PointeeType->getPointeeType(); |
1543 | } |
1544 | return Result; |
1545 | } |
1546 | |
1547 | /// \returns true if the constructor invoked by \p NE has an argument of a |
1548 | /// pointer/reference to a record type. |
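/// For example, in 'new Node(&List)' the constructor may store 'this' into
/// 'List', through which the new allocation can escape.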
1549 | static bool hasNonTrivialConstructorCall(const CXXNewExpr *NE) { |
1550 | |
1551 | const CXXConstructExpr *ConstructE = NE->getConstructExpr(); |
1552 | if (!ConstructE) |
1553 | return false; |
1554 | |
1555 | if (!NE->getAllocatedType()->getAsCXXRecordDecl()) |
1556 | return false; |
1557 | |
1558 | const CXXConstructorDecl *CtorD = ConstructE->getConstructor(); |
1559 | |
1560 | // Iterate over the constructor parameters. |
1561 | for (const auto *CtorParam : CtorD->parameters()) { |
1562 | |
1563 | QualType CtorParamPointeeT = CtorParam->getType()->getPointeeType(); |
1564 | if (CtorParamPointeeT.isNull()) |
1565 | continue; |
1566 | |
1567 | CtorParamPointeeT = getDeepPointeeType(T: CtorParamPointeeT); |
1568 | |
1569 | if (CtorParamPointeeT->getAsCXXRecordDecl()) |
1570 | return true; |
1571 | } |
1572 | |
1573 | return false; |
1574 | } |
1575 | |
1576 | ProgramStateRef |
1577 | MallocChecker::processNewAllocation(const CXXAllocatorCall &Call, |
1578 | CheckerContext &C, |
1579 | AllocationFamily Family) const { |
1580 | if (!isStandardNewDelete(Call)) |
1581 | return nullptr; |
1582 | |
1583 | const CXXNewExpr *NE = Call.getOriginExpr(); |
1584 | const ParentMap &PM = C.getLocationContext()->getParentMap(); |
1585 | ProgramStateRef State = C.getState(); |
1586 | |
  // Non-trivial constructors have a chance to escape 'this', but marking every
  // constructor invocation as escaped would cause too great a reduction in
  // true positives, so only do so for constructors that take an argument of
  // pointer-to-record type.
1591 | if (!PM.isConsumedExpr(E: NE) && hasNonTrivialConstructorCall(NE)) |
1592 | return State; |
1593 | |
  // The return value from operator new is bound to a specified initialization
  // value (if any) and we don't want to lose this value. So we call
  // MallocUpdateRefState() instead of MallocMemAux(), which would break the
  // existing binding.
1598 | SVal Target = Call.getObjectUnderConstruction(); |
1599 | if (Call.getOriginExpr()->isArray()) { |
1600 | if (auto SizeEx = NE->getArraySize()) |
1601 | checkTaintedness(C, Call, SizeSVal: C.getSVal(S: *SizeEx), State, Family: AF_CXXNewArray); |
1602 | } |
1603 | |
1604 | State = MallocUpdateRefState(C, E: NE, State, Family, RetVal: Target); |
1605 | State = ProcessZeroAllocCheck(Call, IndexOfSizeArg: 0, State, RetVal: Target); |
1606 | return State; |
1607 | } |
1608 | |
1609 | void MallocChecker::checkNewAllocator(const CXXAllocatorCall &Call, |
1610 | CheckerContext &C) const { |
1611 | if (!C.wasInlined) { |
1612 | ProgramStateRef State = processNewAllocation( |
1613 | Call, C, |
1614 | Family: (Call.getOriginExpr()->isArray() ? AF_CXXNewArray : AF_CXXNew)); |
1615 | C.addTransition(State); |
1616 | } |
1617 | } |
1618 | |
1619 | static bool isKnownDeallocObjCMethodName(const ObjCMethodCall &Call) { |
1620 | // If the first selector piece is one of the names below, assume that the |
1621 | // object takes ownership of the memory, promising to eventually deallocate it |
1622 | // with free(). |
1623 | // Ex: [NSData dataWithBytesNoCopy:bytes length:10]; |
1624 | // (...unless a 'freeWhenDone' parameter is false, but that's checked later.) |
1625 | StringRef FirstSlot = Call.getSelector().getNameForSlot(argIndex: 0); |
1626 | return FirstSlot == "dataWithBytesNoCopy" || |
1627 | FirstSlot == "initWithBytesNoCopy" || |
1628 | FirstSlot == "initWithCharactersNoCopy" ; |
1629 | } |
1630 | |
1631 | static std::optional<bool> getFreeWhenDoneArg(const ObjCMethodCall &Call) { |
1632 | Selector S = Call.getSelector(); |
1633 | |
1634 | // FIXME: We should not rely on fully-constrained symbols being folded. |
1635 | for (unsigned i = 1; i < S.getNumArgs(); ++i) |
1636 | if (S.getNameForSlot(argIndex: i) == "freeWhenDone" ) |
1637 | return !Call.getArgSVal(Index: i).isZeroConstant(); |
1638 | |
1639 | return std::nullopt; |
1640 | } |
1641 | |
1642 | void MallocChecker::checkPostObjCMessage(const ObjCMethodCall &Call, |
1643 | CheckerContext &C) const { |
1644 | if (C.wasInlined) |
1645 | return; |
1646 | |
1647 | if (!isKnownDeallocObjCMethodName(Call)) |
1648 | return; |
1649 | |
1650 | if (std::optional<bool> FreeWhenDone = getFreeWhenDoneArg(Call)) |
1651 | if (!*FreeWhenDone) |
1652 | return; |
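
  // At this point either there was no freeWhenDone: argument or it was true,
  // e.g. [NSData dataWithBytesNoCopy:Buf length:10 freeWhenDone:YES], so the
  // receiver takes ownership of 'Buf' and will eventually free() it.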
1653 | |
1654 | if (Call.hasNonZeroCallbackArg()) |
1655 | return; |
1656 | |
1657 | bool IsKnownToBeAllocatedMemory; |
1658 | ProgramStateRef State = |
1659 | FreeMemAux(C, ArgExpr: Call.getArgExpr(Index: 0), Call, State: C.getState(), |
1660 | /*Hold=*/true, IsKnownToBeAllocated&: IsKnownToBeAllocatedMemory, Family: AF_Malloc, |
1661 | /*ReturnsNullOnFailure=*/true); |
1662 | |
1663 | C.addTransition(State); |
1664 | } |
1665 | |
1666 | ProgramStateRef |
1667 | MallocChecker::MallocMemReturnsAttr(CheckerContext &C, const CallEvent &Call, |
1668 | const OwnershipAttr *Att, |
1669 | ProgramStateRef State) const { |
1670 | if (!State) |
1671 | return nullptr; |
1672 | |
1673 | if (Att->getModule()->getName() != "malloc" ) |
1674 | return nullptr; |
1675 | |
1676 | if (!Att->args().empty()) { |
1677 | return MallocMemAux(C, Call, |
1678 | SizeEx: Call.getArgExpr(Index: Att->args_begin()->getASTIndex()), |
1679 | Init: UndefinedVal(), State, Family: AF_Malloc); |
1680 | } |
1681 | return MallocMemAux(C, Call, Size: UnknownVal(), Init: UndefinedVal(), State, Family: AF_Malloc); |
1682 | } |
1683 | |
1684 | ProgramStateRef MallocChecker::MallocMemAux(CheckerContext &C, |
1685 | const CallEvent &Call, |
1686 | const Expr *SizeEx, SVal Init, |
1687 | ProgramStateRef State, |
1688 | AllocationFamily Family) const { |
1689 | if (!State) |
1690 | return nullptr; |
1691 | |
1692 | assert(SizeEx); |
1693 | return MallocMemAux(C, Call, Size: C.getSVal(S: SizeEx), Init, State, Family); |
1694 | } |
1695 | |
1696 | void MallocChecker::reportTaintBug(StringRef Msg, ProgramStateRef State, |
1697 | CheckerContext &C, |
1698 | llvm::ArrayRef<SymbolRef> TaintedSyms, |
1699 | AllocationFamily Family) const { |
1700 | if (ExplodedNode *N = C.generateNonFatalErrorNode(State, Tag: this)) { |
1701 | if (!BT_TaintedAlloc) |
1702 | BT_TaintedAlloc.reset(p: new BugType(CheckNames[CK_TaintedAllocChecker], |
1703 | "Tainted Memory Allocation" , |
1704 | categories::TaintedData)); |
1705 | auto R = std::make_unique<PathSensitiveBugReport>(args&: *BT_TaintedAlloc, args&: Msg, args&: N); |
1706 | for (auto TaintedSym : TaintedSyms) { |
1707 | R->markInteresting(sym: TaintedSym); |
1708 | } |
1709 | C.emitReport(R: std::move(R)); |
1710 | } |
1711 | } |
1712 | |
1713 | void MallocChecker::checkTaintedness(CheckerContext &C, const CallEvent &Call, |
1714 | const SVal SizeSVal, ProgramStateRef State, |
1715 | AllocationFamily Family) const { |
1716 | if (!ChecksEnabled[CK_TaintedAllocChecker]) |
1717 | return; |
1718 | std::vector<SymbolRef> TaintedSyms = |
1719 | taint::getTaintedSymbols(State, V: SizeSVal); |
1720 | if (TaintedSyms.empty()) |
1721 | return; |
1722 | |
1723 | SValBuilder &SVB = C.getSValBuilder(); |
1724 | QualType SizeTy = SVB.getContext().getSizeType(); |
1725 | QualType CmpTy = SVB.getConditionType(); |
  // If the size value is tainted, warn when it may be larger than SIZE_MAX/4.
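  // For example:
  //   size_t N;
  //   scanf("%zu", &N);      // 'N' becomes tainted
  //   char *P = malloc(N);   // warns here if 'N' may exceed SIZE_MAX/4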
1728 | BasicValueFactory &BVF = SVB.getBasicValueFactory(); |
1729 | const llvm::APSInt MaxValInt = BVF.getMaxValue(T: SizeTy); |
1730 | NonLoc MaxLength = |
1731 | SVB.makeIntVal(integer: MaxValInt / APSIntType(MaxValInt).getValue(RawValue: 4)); |
1732 | std::optional<NonLoc> SizeNL = SizeSVal.getAs<NonLoc>(); |
1733 | auto Cmp = SVB.evalBinOpNN(state: State, op: BO_GE, lhs: *SizeNL, rhs: MaxLength, resultTy: CmpTy) |
1734 | .getAs<DefinedOrUnknownSVal>(); |
1735 | if (!Cmp) |
1736 | return; |
1737 | auto [StateTooLarge, StateNotTooLarge] = State->assume(Cond: *Cmp); |
1738 | if (!StateTooLarge && StateNotTooLarge) { |
1739 | // We can prove that size is not too large so there is no issue. |
1740 | return; |
1741 | } |
1742 | |
1743 | std::string Callee = "Memory allocation function" ; |
1744 | if (Call.getCalleeIdentifier()) |
1745 | Callee = Call.getCalleeIdentifier()->getName().str(); |
1746 | reportTaintBug( |
      Msg: Callee + " is called with a tainted (potentially attacker controlled) "
                    "value. Make sure the value is bounds checked.",
1749 | State, C, TaintedSyms, Family); |
1750 | } |
1751 | |
1752 | ProgramStateRef MallocChecker::MallocMemAux(CheckerContext &C, |
1753 | const CallEvent &Call, SVal Size, |
1754 | SVal Init, ProgramStateRef State, |
1755 | AllocationFamily Family) const { |
1756 | if (!State) |
1757 | return nullptr; |
1758 | |
1759 | const Expr *CE = Call.getOriginExpr(); |
1760 | |
1761 | // We expect the malloc functions to return a pointer. |
1762 | if (!Loc::isLocType(T: CE->getType())) |
1763 | return nullptr; |
1764 | |
1765 | // Bind the return value to the symbolic value from the heap region. |
  // TODO: Move the use of this function to an EvalCall callback, because
  // BindExpr() shouldn't be used elsewhere.
1768 | unsigned Count = C.blockCount(); |
1769 | SValBuilder &SVB = C.getSValBuilder(); |
1770 | const LocationContext *LCtx = C.getPredecessor()->getLocationContext(); |
1771 | DefinedSVal RetVal = |
1772 | ((Family == AF_Alloca) ? SVB.getAllocaRegionVal(E: CE, LCtx, Count) |
1773 | : SVB.getConjuredHeapSymbolVal(E: CE, LCtx, Count) |
1774 | .castAs<DefinedSVal>()); |
1775 | State = State->BindExpr(S: CE, LCtx: C.getLocationContext(), V: RetVal); |
1776 | |
1777 | // Fill the region with the initialization value. |
1778 | State = State->bindDefaultInitial(loc: RetVal, V: Init, LCtx); |
1779 | |
1780 | // If Size is somehow undefined at this point, this line prevents a crash. |
1781 | if (Size.isUndef()) |
1782 | Size = UnknownVal(); |
1783 | |
1784 | checkTaintedness(C, Call, SizeSVal: Size, State, Family: AF_Malloc); |
1785 | |
1786 | // Set the region's extent. |
1787 | State = setDynamicExtent(State, MR: RetVal.getAsRegion(), |
1788 | Extent: Size.castAs<DefinedOrUnknownSVal>(), SVB); |
1789 | |
1790 | return MallocUpdateRefState(C, E: CE, State, Family); |
1791 | } |
1792 | |
1793 | static ProgramStateRef MallocUpdateRefState(CheckerContext &C, const Expr *E, |
1794 | ProgramStateRef State, |
1795 | AllocationFamily Family, |
1796 | std::optional<SVal> RetVal) { |
1797 | if (!State) |
1798 | return nullptr; |
1799 | |
1800 | // Get the return value. |
1801 | if (!RetVal) |
1802 | RetVal = C.getSVal(S: E); |
1803 | |
1804 | // We expect the malloc functions to return a pointer. |
1805 | if (!RetVal->getAs<Loc>()) |
1806 | return nullptr; |
1807 | |
1808 | SymbolRef Sym = RetVal->getAsLocSymbol(); |
1809 | |
  // This is the return value of a function that was not inlined, such as
  // malloc() or operator new. We've checked that in the caller. Therefore,
  // it must be a symbol.
1812 | assert(Sym); |
1813 | // FIXME: In theory this assertion should fail for `alloca()` calls (because |
1814 | // `AllocaRegion`s are not symbolic); but in practice this does not happen. |
1815 | // As the current code appears to work correctly, I'm not touching this issue |
1816 | // now, but it would be good to investigate and clarify this. |
1817 | // Also note that perhaps the special `AllocaRegion` should be replaced by |
1818 | // `SymbolicRegion` (or turned into a subclass of `SymbolicRegion`) to enable |
1819 | // proper tracking of memory allocated by `alloca()` -- and after that change |
1820 | // this assertion would become valid again. |
1821 | |
1822 | // Set the symbol's state to Allocated. |
1823 | return State->set<RegionState>(K: Sym, E: RefState::getAllocated(family: Family, s: E)); |
1824 | } |
1825 | |
1826 | ProgramStateRef MallocChecker::FreeMemAttr(CheckerContext &C, |
1827 | const CallEvent &Call, |
1828 | const OwnershipAttr *Att, |
1829 | ProgramStateRef State) const { |
1830 | if (!State) |
1831 | return nullptr; |
1832 | |
1833 | if (Att->getModule()->getName() != "malloc" ) |
1834 | return nullptr; |
1835 | |
1836 | bool IsKnownToBeAllocated = false; |
1837 | |
1838 | for (const auto &Arg : Att->args()) { |
1839 | ProgramStateRef StateI = |
1840 | FreeMemAux(C, Call, State, Num: Arg.getASTIndex(), |
1841 | Hold: Att->getOwnKind() == OwnershipAttr::Holds, |
1842 | IsKnownToBeAllocated, Family: AF_Malloc); |
1843 | if (StateI) |
1844 | State = StateI; |
1845 | } |
1846 | return State; |
1847 | } |
1848 | |
1849 | ProgramStateRef MallocChecker::FreeMemAux(CheckerContext &C, |
1850 | const CallEvent &Call, |
1851 | ProgramStateRef State, unsigned Num, |
1852 | bool Hold, bool &IsKnownToBeAllocated, |
1853 | AllocationFamily Family, |
1854 | bool ReturnsNullOnFailure) const { |
1855 | if (!State) |
1856 | return nullptr; |
1857 | |
1858 | if (Call.getNumArgs() < (Num + 1)) |
1859 | return nullptr; |
1860 | |
1861 | return FreeMemAux(C, ArgExpr: Call.getArgExpr(Index: Num), Call, State, Hold, |
1862 | IsKnownToBeAllocated, Family, ReturnsNullOnFailure); |
1863 | } |
1864 | |
/// Checks whether the previous call to free on the given symbol failed; if it
/// did, returns true. Also returns the corresponding return-value symbol via
/// \p RetStatusSymbol.
1867 | static bool didPreviousFreeFail(ProgramStateRef State, |
1868 | SymbolRef Sym, SymbolRef &RetStatusSymbol) { |
1869 | const SymbolRef *Ret = State->get<FreeReturnValue>(key: Sym); |
1870 | if (Ret) { |
1871 | assert(*Ret && "We should not store the null return symbol" ); |
1872 | ConstraintManager &CMgr = State->getConstraintManager(); |
1873 | ConditionTruthVal FreeFailed = CMgr.isNull(State, Sym: *Ret); |
1874 | RetStatusSymbol = *Ret; |
1875 | return FreeFailed.isConstrainedTrue(); |
1876 | } |
1877 | return false; |
1878 | } |
1879 | |
1880 | static bool printMemFnName(raw_ostream &os, CheckerContext &C, const Expr *E) { |
1881 | if (const CallExpr *CE = dyn_cast<CallExpr>(Val: E)) { |
1882 | // FIXME: This doesn't handle indirect calls. |
1883 | const FunctionDecl *FD = CE->getDirectCallee(); |
1884 | if (!FD) |
1885 | return false; |
1886 | |
1887 | os << *FD; |
1888 | if (!FD->isOverloadedOperator()) |
1889 | os << "()" ; |
1890 | return true; |
1891 | } |
1892 | |
1893 | if (const ObjCMessageExpr *Msg = dyn_cast<ObjCMessageExpr>(Val: E)) { |
1894 | if (Msg->isInstanceMessage()) |
1895 | os << "-" ; |
1896 | else |
1897 | os << "+" ; |
1898 | Msg->getSelector().print(OS&: os); |
1899 | return true; |
1900 | } |
1901 | |
1902 | if (const CXXNewExpr *NE = dyn_cast<CXXNewExpr>(Val: E)) { |
1903 | os << "'" |
1904 | << getOperatorSpelling(Operator: NE->getOperatorNew()->getOverloadedOperator()) |
1905 | << "'" ; |
1906 | return true; |
1907 | } |
1908 | |
1909 | if (const CXXDeleteExpr *DE = dyn_cast<CXXDeleteExpr>(Val: E)) { |
1910 | os << "'" |
1911 | << getOperatorSpelling(Operator: DE->getOperatorDelete()->getOverloadedOperator()) |
1912 | << "'" ; |
1913 | return true; |
1914 | } |
1915 | |
1916 | return false; |
1917 | } |
1918 | |
1919 | static void printExpectedAllocName(raw_ostream &os, AllocationFamily Family) { |
1920 | |
1921 | switch(Family) { |
1922 | case AF_Malloc: os << "malloc()" ; return; |
1923 | case AF_CXXNew: os << "'new'" ; return; |
1924 | case AF_CXXNewArray: os << "'new[]'" ; return; |
1925 | case AF_IfNameIndex: os << "'if_nameindex()'" ; return; |
1926 | case AF_InnerBuffer: os << "container-specific allocator" ; return; |
1927 | case AF_Alloca: |
1928 | case AF_None: llvm_unreachable("not a deallocation expression" ); |
1929 | } |
1930 | } |
1931 | |
1932 | static void printExpectedDeallocName(raw_ostream &os, AllocationFamily Family) { |
1933 | switch(Family) { |
1934 | case AF_Malloc: os << "free()" ; return; |
1935 | case AF_CXXNew: os << "'delete'" ; return; |
1936 | case AF_CXXNewArray: os << "'delete[]'" ; return; |
1937 | case AF_IfNameIndex: os << "'if_freenameindex()'" ; return; |
1938 | case AF_InnerBuffer: os << "container-specific deallocator" ; return; |
1939 | case AF_Alloca: |
1940 | case AF_None: llvm_unreachable("suspicious argument" ); |
1941 | } |
1942 | } |
1943 | |
1944 | ProgramStateRef |
1945 | MallocChecker::FreeMemAux(CheckerContext &C, const Expr *ArgExpr, |
1946 | const CallEvent &Call, ProgramStateRef State, |
1947 | bool Hold, bool &IsKnownToBeAllocated, |
1948 | AllocationFamily Family, bool ReturnsNullOnFailure, |
1949 | std::optional<SVal> ArgValOpt) const { |
1950 | |
1951 | if (!State) |
1952 | return nullptr; |
1953 | |
1954 | SVal ArgVal = ArgValOpt.value_or(u: C.getSVal(S: ArgExpr)); |
1955 | if (!isa<DefinedOrUnknownSVal>(Val: ArgVal)) |
1956 | return nullptr; |
1957 | DefinedOrUnknownSVal location = ArgVal.castAs<DefinedOrUnknownSVal>(); |
1958 | |
1959 | // Check for null dereferences. |
1960 | if (!isa<Loc>(Val: location)) |
1961 | return nullptr; |
1962 | |
1963 | // The explicit NULL case, no operation is performed. |
1964 | ProgramStateRef notNullState, nullState; |
1965 | std::tie(args&: notNullState, args&: nullState) = State->assume(Cond: location); |
1966 | if (nullState && !notNullState) |
1967 | return nullptr; |
1968 | |
1969 | // Unknown values could easily be okay |
1970 | // Undefined values are handled elsewhere |
1971 | if (ArgVal.isUnknownOrUndef()) |
1972 | return nullptr; |
1973 | |
1974 | const MemRegion *R = ArgVal.getAsRegion(); |
1975 | const Expr *ParentExpr = Call.getOriginExpr(); |
1976 | |
1977 | // NOTE: We detected a bug, but the checker under whose name we would emit the |
1978 | // error could be disabled. Generally speaking, the MallocChecker family is an |
1979 | // integral part of the Static Analyzer, and disabling any part of it should |
1980 | // only be done under exceptional circumstances, such as frequent false |
1981 | // positives. If this is the case, we can reasonably believe that there are |
1982 | // serious faults in our understanding of the source code, and even if we |
  // don't emit a warning, we should terminate further analysis with a sink
1984 | // node. |
1985 | |
1986 | // Nonlocs can't be freed, of course. |
1987 | // Non-region locations (labels and fixed addresses) also shouldn't be freed. |
1988 | if (!R) { |
1989 | // Exception: |
    // If the macro ZERO_SIZE_PTR is defined, this is likely kernel code. In
    // that case, ZERO_SIZE_PTR is a special value used for a zero-sized memory
    // block, which is allowed to be freed despite not being a null pointer.
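    // For example, the Linux kernel defines ZERO_SIZE_PTR as ((void *)16).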
1994 | if (Family != AF_Malloc || !isArgZERO_SIZE_PTR(State, C, ArgVal)) |
1995 | HandleNonHeapDealloc(C, ArgVal, Range: ArgExpr->getSourceRange(), DeallocExpr: ParentExpr, |
1996 | Family); |
1997 | return nullptr; |
1998 | } |
1999 | |
2000 | R = R->StripCasts(); |
2001 | |
2002 | // Blocks might show up as heap data, but should not be free()d |
2003 | if (isa<BlockDataRegion>(Val: R)) { |
2004 | HandleNonHeapDealloc(C, ArgVal, Range: ArgExpr->getSourceRange(), DeallocExpr: ParentExpr, |
2005 | Family); |
2006 | return nullptr; |
2007 | } |
2008 | |
2009 | const MemSpaceRegion *MS = R->getMemorySpace(); |
2010 | |
2011 | // Parameters, locals, statics, globals, and memory returned by |
2012 | // __builtin_alloca() shouldn't be freed. |
2013 | if (!isa<UnknownSpaceRegion, HeapSpaceRegion>(Val: MS)) { |
2014 | // Regions returned by malloc() are represented by SymbolicRegion objects |
2015 | // within HeapSpaceRegion. Of course, free() can work on memory allocated |
2016 | // outside the current function, so UnknownSpaceRegion is also a |
2017 | // possibility here. |
2018 | |
2019 | if (isa<AllocaRegion>(Val: R)) |
2020 | HandleFreeAlloca(C, ArgVal, Range: ArgExpr->getSourceRange()); |
2021 | else |
2022 | HandleNonHeapDealloc(C, ArgVal, Range: ArgExpr->getSourceRange(), DeallocExpr: ParentExpr, |
2023 | Family); |
2024 | |
2025 | return nullptr; |
2026 | } |
2027 | |
2028 | const SymbolicRegion *SrBase = dyn_cast<SymbolicRegion>(Val: R->getBaseRegion()); |
2029 | // Various cases could lead to non-symbol values here. |
2030 | // For now, ignore them. |
2031 | if (!SrBase) |
2032 | return nullptr; |
2033 | |
2034 | SymbolRef SymBase = SrBase->getSymbol(); |
2035 | const RefState *RsBase = State->get<RegionState>(key: SymBase); |
2036 | SymbolRef PreviousRetStatusSymbol = nullptr; |
2037 | |
2038 | IsKnownToBeAllocated = |
2039 | RsBase && (RsBase->isAllocated() || RsBase->isAllocatedOfSizeZero()); |
2040 | |
2041 | if (RsBase) { |
2042 | |
2043 | // Memory returned by alloca() shouldn't be freed. |
2044 | if (RsBase->getAllocationFamily() == AF_Alloca) { |
2045 | HandleFreeAlloca(C, ArgVal, Range: ArgExpr->getSourceRange()); |
2046 | return nullptr; |
2047 | } |
2048 | |
2049 | // Check for double free first. |
2050 | if ((RsBase->isReleased() || RsBase->isRelinquished()) && |
2051 | !didPreviousFreeFail(State, Sym: SymBase, RetStatusSymbol&: PreviousRetStatusSymbol)) { |
2052 | HandleDoubleFree(C, Range: ParentExpr->getSourceRange(), Released: RsBase->isReleased(), |
2053 | Sym: SymBase, PrevSym: PreviousRetStatusSymbol); |
2054 | return nullptr; |
2055 | |
2056 | // If the pointer is allocated or escaped, but we are now trying to free it, |
2057 | // check that the call to free is proper. |
2058 | } else if (RsBase->isAllocated() || RsBase->isAllocatedOfSizeZero() || |
2059 | RsBase->isEscaped()) { |
2060 | |
2061 | // Check if an expected deallocation function matches the real one. |
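      // For example, 'int *P = new int; free(P);' is a mismatch and is
      // reported below.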
2062 | bool DeallocMatchesAlloc = RsBase->getAllocationFamily() == Family; |
2063 | if (!DeallocMatchesAlloc) { |
2064 | HandleMismatchedDealloc(C, Range: ArgExpr->getSourceRange(), DeallocExpr: ParentExpr, |
2065 | RS: RsBase, Sym: SymBase, OwnershipTransferred: Hold); |
2066 | return nullptr; |
2067 | } |
2068 | |
2069 | // Check if the memory location being freed is the actual location |
2070 | // allocated, or an offset. |
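      // For example, 'char *P = (char *)malloc(N); free(P + 2);' frees an
      // offset into the allocation and is reported below.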
2071 | RegionOffset Offset = R->getAsOffset(); |
2072 | if (Offset.isValid() && |
2073 | !Offset.hasSymbolicOffset() && |
2074 | Offset.getOffset() != 0) { |
2075 | const Expr *AllocExpr = cast<Expr>(Val: RsBase->getStmt()); |
2076 | HandleOffsetFree(C, ArgVal, Range: ArgExpr->getSourceRange(), DeallocExpr: ParentExpr, |
2077 | Family, AllocExpr); |
2078 | return nullptr; |
2079 | } |
2080 | } |
2081 | } |
2082 | |
2083 | if (SymBase->getType()->isFunctionPointerType()) { |
2084 | HandleFunctionPtrFree(C, ArgVal, Range: ArgExpr->getSourceRange(), FreeExpr: ParentExpr, |
2085 | Family); |
2086 | return nullptr; |
2087 | } |
2088 | |
  // Clean out the return-value info recorded for a previous call to free.
2090 | State = State->remove<FreeReturnValue>(K: SymBase); |
2091 | |
2092 | // Keep track of the return value. If it is NULL, we will know that free |
2093 | // failed. |
2094 | if (ReturnsNullOnFailure) { |
2095 | SVal RetVal = C.getSVal(S: ParentExpr); |
2096 | SymbolRef RetStatusSymbol = RetVal.getAsSymbol(); |
2097 | if (RetStatusSymbol) { |
2098 | C.getSymbolManager().addSymbolDependency(Primary: SymBase, Dependent: RetStatusSymbol); |
2099 | State = State->set<FreeReturnValue>(K: SymBase, E: RetStatusSymbol); |
2100 | } |
2101 | } |
2102 | |
  // If we don't know anything about this symbol, a free on it may be totally
  // valid. If this is the case, let's assume that the allocation family of the
  // freeing function is the same as the symbol's allocation family, and go
  // with that.
2107 | assert(!RsBase || (RsBase && RsBase->getAllocationFamily() == Family)); |
2108 | |
2109 | // Normal free. |
2110 | if (Hold) |
2111 | return State->set<RegionState>(K: SymBase, |
2112 | E: RefState::getRelinquished(family: Family, |
2113 | s: ParentExpr)); |
2114 | |
2115 | return State->set<RegionState>(K: SymBase, |
2116 | E: RefState::getReleased(family: Family, s: ParentExpr)); |
2117 | } |
2118 | |
2119 | std::optional<MallocChecker::CheckKind> |
2120 | MallocChecker::getCheckIfTracked(AllocationFamily Family, |
2121 | bool IsALeakCheck) const { |
2122 | switch (Family) { |
2123 | case AF_Malloc: |
2124 | case AF_Alloca: |
2125 | case AF_IfNameIndex: { |
2126 | if (ChecksEnabled[CK_MallocChecker]) |
2127 | return CK_MallocChecker; |
2128 | return std::nullopt; |
2129 | } |
2130 | case AF_CXXNew: |
2131 | case AF_CXXNewArray: { |
2132 | if (IsALeakCheck) { |
2133 | if (ChecksEnabled[CK_NewDeleteLeaksChecker]) |
2134 | return CK_NewDeleteLeaksChecker; |
2135 | } |
2136 | else { |
2137 | if (ChecksEnabled[CK_NewDeleteChecker]) |
2138 | return CK_NewDeleteChecker; |
2139 | } |
2140 | return std::nullopt; |
2141 | } |
2142 | case AF_InnerBuffer: { |
2143 | if (ChecksEnabled[CK_InnerPointerChecker]) |
2144 | return CK_InnerPointerChecker; |
2145 | return std::nullopt; |
2146 | } |
2147 | case AF_None: { |
2148 | llvm_unreachable("no family" ); |
2149 | } |
2150 | } |
2151 | llvm_unreachable("unhandled family" ); |
2152 | } |
2153 | |
2154 | std::optional<MallocChecker::CheckKind> |
2155 | MallocChecker::getCheckIfTracked(CheckerContext &C, SymbolRef Sym, |
2156 | bool IsALeakCheck) const { |
2157 | if (C.getState()->contains<ReallocSizeZeroSymbols>(key: Sym)) |
2158 | return CK_MallocChecker; |
2159 | |
2160 | const RefState *RS = C.getState()->get<RegionState>(key: Sym); |
2161 | assert(RS); |
2162 | return getCheckIfTracked(Family: RS->getAllocationFamily(), IsALeakCheck); |
2163 | } |
2164 | |
2165 | bool MallocChecker::SummarizeValue(raw_ostream &os, SVal V) { |
2166 | if (std::optional<nonloc::ConcreteInt> IntVal = |
2167 | V.getAs<nonloc::ConcreteInt>()) |
2168 | os << "an integer (" << IntVal->getValue() << ")" ; |
2169 | else if (std::optional<loc::ConcreteInt> ConstAddr = |
2170 | V.getAs<loc::ConcreteInt>()) |
2171 | os << "a constant address (" << ConstAddr->getValue() << ")" ; |
2172 | else if (std::optional<loc::GotoLabel> Label = V.getAs<loc::GotoLabel>()) |
2173 | os << "the address of the label '" << Label->getLabel()->getName() << "'" ; |
2174 | else |
2175 | return false; |
2176 | |
2177 | return true; |
2178 | } |
2179 | |
2180 | bool MallocChecker::SummarizeRegion(raw_ostream &os, |
2181 | const MemRegion *MR) { |
2182 | switch (MR->getKind()) { |
2183 | case MemRegion::FunctionCodeRegionKind: { |
2184 | const NamedDecl *FD = cast<FunctionCodeRegion>(Val: MR)->getDecl(); |
2185 | if (FD) |
2186 | os << "the address of the function '" << *FD << '\''; |
2187 | else |
2188 | os << "the address of a function" ; |
2189 | return true; |
2190 | } |
2191 | case MemRegion::BlockCodeRegionKind: |
2192 | os << "block text" ; |
2193 | return true; |
2194 | case MemRegion::BlockDataRegionKind: |
    // FIXME: Where did the block come from?
2196 | os << "a block" ; |
2197 | return true; |
2198 | default: { |
2199 | const MemSpaceRegion *MS = MR->getMemorySpace(); |
2200 | |
2201 | if (isa<StackLocalsSpaceRegion>(Val: MS)) { |
2202 | const VarRegion *VR = dyn_cast<VarRegion>(Val: MR); |
2203 | const VarDecl *VD; |
2204 | if (VR) |
2205 | VD = VR->getDecl(); |
2206 | else |
2207 | VD = nullptr; |
2208 | |
2209 | if (VD) |
2210 | os << "the address of the local variable '" << VD->getName() << "'" ; |
2211 | else |
2212 | os << "the address of a local stack variable" ; |
2213 | return true; |
2214 | } |
2215 | |
2216 | if (isa<StackArgumentsSpaceRegion>(Val: MS)) { |
2217 | const VarRegion *VR = dyn_cast<VarRegion>(Val: MR); |
2218 | const VarDecl *VD; |
2219 | if (VR) |
2220 | VD = VR->getDecl(); |
2221 | else |
2222 | VD = nullptr; |
2223 | |
2224 | if (VD) |
2225 | os << "the address of the parameter '" << VD->getName() << "'" ; |
2226 | else |
2227 | os << "the address of a parameter" ; |
2228 | return true; |
2229 | } |
2230 | |
2231 | if (isa<GlobalsSpaceRegion>(Val: MS)) { |
2232 | const VarRegion *VR = dyn_cast<VarRegion>(Val: MR); |
2233 | const VarDecl *VD; |
2234 | if (VR) |
2235 | VD = VR->getDecl(); |
2236 | else |
2237 | VD = nullptr; |
2238 | |
2239 | if (VD) { |
2240 | if (VD->isStaticLocal()) |
2241 | os << "the address of the static variable '" << VD->getName() << "'" ; |
2242 | else |
2243 | os << "the address of the global variable '" << VD->getName() << "'" ; |
2244 | } else |
2245 | os << "the address of a global variable" ; |
2246 | return true; |
2247 | } |
2248 | |
2249 | return false; |
2250 | } |
2251 | } |
2252 | } |
2253 | |
2254 | void MallocChecker::HandleNonHeapDealloc(CheckerContext &C, SVal ArgVal, |
2255 | SourceRange Range, |
2256 | const Expr *DeallocExpr, |
2257 | AllocationFamily Family) const { |
2258 | |
2259 | if (!ChecksEnabled[CK_MallocChecker] && !ChecksEnabled[CK_NewDeleteChecker]) { |
2260 | C.addSink(); |
2261 | return; |
2262 | } |
2263 | |
2264 | std::optional<MallocChecker::CheckKind> CheckKind = getCheckIfTracked(Family); |
2265 | if (!CheckKind) |
2266 | return; |
2267 | |
2268 | if (ExplodedNode *N = C.generateErrorNode()) { |
2269 | if (!BT_BadFree[*CheckKind]) |
2270 | BT_BadFree[*CheckKind].reset(p: new BugType( |
2271 | CheckNames[*CheckKind], "Bad free" , categories::MemoryError)); |
2272 | |
2273 | SmallString<100> buf; |
2274 | llvm::raw_svector_ostream os(buf); |
2275 | |
2276 | const MemRegion *MR = ArgVal.getAsRegion(); |
2277 | while (const ElementRegion *ER = dyn_cast_or_null<ElementRegion>(Val: MR)) |
2278 | MR = ER->getSuperRegion(); |
2279 | |
2280 | os << "Argument to " ; |
2281 | if (!printMemFnName(os, C, E: DeallocExpr)) |
2282 | os << "deallocator" ; |
2283 | |
2284 | os << " is " ; |
2285 | bool Summarized = MR ? SummarizeRegion(os, MR) |
2286 | : SummarizeValue(os, V: ArgVal); |
2287 | if (Summarized) |
2288 | os << ", which is not memory allocated by " ; |
2289 | else |
2290 | os << "not memory allocated by " ; |
2291 | |
2292 | printExpectedAllocName(os, Family); |
2293 | |
2294 | auto R = std::make_unique<PathSensitiveBugReport>(args&: *BT_BadFree[*CheckKind], |
2295 | args: os.str(), args&: N); |
2296 | R->markInteresting(R: MR); |
2297 | R->addRange(R: Range); |
2298 | C.emitReport(R: std::move(R)); |
2299 | } |
2300 | } |
2301 | |
2302 | void MallocChecker::HandleFreeAlloca(CheckerContext &C, SVal ArgVal, |
2303 | SourceRange Range) const { |
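  // Reports e.g.
  //   char *P = (char *)alloca(16);
  //   free(P);   // memory allocated by alloca() should not be deallocated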
2304 | |
2305 | std::optional<MallocChecker::CheckKind> CheckKind; |
2306 | |
2307 | if (ChecksEnabled[CK_MallocChecker]) |
2308 | CheckKind = CK_MallocChecker; |
2309 | else if (ChecksEnabled[CK_MismatchedDeallocatorChecker]) |
2310 | CheckKind = CK_MismatchedDeallocatorChecker; |
2311 | else { |
2312 | C.addSink(); |
2313 | return; |
2314 | } |
2315 | |
2316 | if (ExplodedNode *N = C.generateErrorNode()) { |
2317 | if (!BT_FreeAlloca[*CheckKind]) |
2318 | BT_FreeAlloca[*CheckKind].reset(p: new BugType( |
2319 | CheckNames[*CheckKind], "Free alloca()" , categories::MemoryError)); |
2320 | |
2321 | auto R = std::make_unique<PathSensitiveBugReport>( |
2322 | args&: *BT_FreeAlloca[*CheckKind], |
2323 | args: "Memory allocated by alloca() should not be deallocated" , args&: N); |
2324 | R->markInteresting(R: ArgVal.getAsRegion()); |
2325 | R->addRange(R: Range); |
2326 | C.emitReport(R: std::move(R)); |
2327 | } |
2328 | } |
2329 | |
2330 | void MallocChecker::HandleMismatchedDealloc(CheckerContext &C, |
2331 | SourceRange Range, |
2332 | const Expr *DeallocExpr, |
2333 | const RefState *RS, SymbolRef Sym, |
2334 | bool OwnershipTransferred) const { |
2335 | |
2336 | if (!ChecksEnabled[CK_MismatchedDeallocatorChecker]) { |
2337 | C.addSink(); |
2338 | return; |
2339 | } |
2340 | |
2341 | if (ExplodedNode *N = C.generateErrorNode()) { |
2342 | if (!BT_MismatchedDealloc) |
2343 | BT_MismatchedDealloc.reset( |
2344 | p: new BugType(CheckNames[CK_MismatchedDeallocatorChecker], |
2345 | "Bad deallocator" , categories::MemoryError)); |
2346 | |
2347 | SmallString<100> buf; |
2348 | llvm::raw_svector_ostream os(buf); |
2349 | |
2350 | const Expr *AllocExpr = cast<Expr>(Val: RS->getStmt()); |
2351 | SmallString<20> AllocBuf; |
2352 | llvm::raw_svector_ostream AllocOs(AllocBuf); |
2353 | SmallString<20> DeallocBuf; |
2354 | llvm::raw_svector_ostream DeallocOs(DeallocBuf); |
2355 | |
2356 | if (OwnershipTransferred) { |
2357 | if (printMemFnName(os&: DeallocOs, C, E: DeallocExpr)) |
2358 | os << DeallocOs.str() << " cannot" ; |
2359 | else |
2360 | os << "Cannot" ; |
2361 | |
2362 | os << " take ownership of memory" ; |
2363 | |
2364 | if (printMemFnName(os&: AllocOs, C, E: AllocExpr)) |
2365 | os << " allocated by " << AllocOs.str(); |
2366 | } else { |
2367 | os << "Memory" ; |
2368 | if (printMemFnName(os&: AllocOs, C, E: AllocExpr)) |
2369 | os << " allocated by " << AllocOs.str(); |
2370 | |
2371 | os << " should be deallocated by " ; |
2372 | printExpectedDeallocName(os, Family: RS->getAllocationFamily()); |
2373 | |
2374 | if (printMemFnName(os&: DeallocOs, C, E: DeallocExpr)) |
2375 | os << ", not " << DeallocOs.str(); |
2376 | } |
2377 | |
2378 | auto R = std::make_unique<PathSensitiveBugReport>(args&: *BT_MismatchedDealloc, |
2379 | args: os.str(), args&: N); |
2380 | R->markInteresting(sym: Sym); |
2381 | R->addRange(R: Range); |
2382 | R->addVisitor<MallocBugVisitor>(ConstructorArgs&: Sym); |
2383 | C.emitReport(R: std::move(R)); |
2384 | } |
2385 | } |
2386 | |
2387 | void MallocChecker::HandleOffsetFree(CheckerContext &C, SVal ArgVal, |
2388 | SourceRange Range, const Expr *DeallocExpr, |
2389 | AllocationFamily Family, |
2390 | const Expr *AllocExpr) const { |
2391 | |
2392 | if (!ChecksEnabled[CK_MallocChecker] && !ChecksEnabled[CK_NewDeleteChecker]) { |
2393 | C.addSink(); |
2394 | return; |
2395 | } |
2396 | |
2397 | std::optional<MallocChecker::CheckKind> CheckKind = getCheckIfTracked(Family); |
2398 | if (!CheckKind) |
2399 | return; |
2400 | |
2401 | ExplodedNode *N = C.generateErrorNode(); |
2402 | if (!N) |
2403 | return; |
2404 | |
2405 | if (!BT_OffsetFree[*CheckKind]) |
2406 | BT_OffsetFree[*CheckKind].reset(p: new BugType( |
2407 | CheckNames[*CheckKind], "Offset free" , categories::MemoryError)); |
2408 | |
2409 | SmallString<100> buf; |
2410 | llvm::raw_svector_ostream os(buf); |
2411 | SmallString<20> AllocNameBuf; |
2412 | llvm::raw_svector_ostream AllocNameOs(AllocNameBuf); |
2413 | |
2414 | const MemRegion *MR = ArgVal.getAsRegion(); |
2415 | assert(MR && "Only MemRegion based symbols can have offset free errors" ); |
2416 | |
2417 | RegionOffset Offset = MR->getAsOffset(); |
2418 | assert((Offset.isValid() && |
2419 | !Offset.hasSymbolicOffset() && |
2420 | Offset.getOffset() != 0) && |
2421 | "Only symbols with a valid offset can have offset free errors" ); |
2422 | |
2423 | int offsetBytes = Offset.getOffset() / C.getASTContext().getCharWidth(); |
2424 | |
2425 | os << "Argument to " ; |
2426 | if (!printMemFnName(os, C, E: DeallocExpr)) |
2427 | os << "deallocator" ; |
2428 | os << " is offset by " |
2429 | << offsetBytes |
2430 | << " " |
2431 | << ((abs(x: offsetBytes) > 1) ? "bytes" : "byte" ) |
2432 | << " from the start of " ; |
2433 | if (AllocExpr && printMemFnName(os&: AllocNameOs, C, E: AllocExpr)) |
2434 | os << "memory allocated by " << AllocNameOs.str(); |
2435 | else |
2436 | os << "allocated memory" ; |
2437 | |
2438 | auto R = std::make_unique<PathSensitiveBugReport>(args&: *BT_OffsetFree[*CheckKind], |
2439 | args: os.str(), args&: N); |
2440 | R->markInteresting(R: MR->getBaseRegion()); |
2441 | R->addRange(R: Range); |
2442 | C.emitReport(R: std::move(R)); |
2443 | } |
2444 | |
2445 | void MallocChecker::HandleUseAfterFree(CheckerContext &C, SourceRange Range, |
2446 | SymbolRef Sym) const { |
2447 | |
2448 | if (!ChecksEnabled[CK_MallocChecker] && !ChecksEnabled[CK_NewDeleteChecker] && |
2449 | !ChecksEnabled[CK_InnerPointerChecker]) { |
2450 | C.addSink(); |
2451 | return; |
2452 | } |
2453 | |
2454 | std::optional<MallocChecker::CheckKind> CheckKind = getCheckIfTracked(C, Sym); |
2455 | if (!CheckKind) |
2456 | return; |
2457 | |
2458 | if (ExplodedNode *N = C.generateErrorNode()) { |
2459 | if (!BT_UseFree[*CheckKind]) |
2460 | BT_UseFree[*CheckKind].reset(p: new BugType( |
2461 | CheckNames[*CheckKind], "Use-after-free" , categories::MemoryError)); |
2462 | |
2463 | AllocationFamily AF = |
2464 | C.getState()->get<RegionState>(key: Sym)->getAllocationFamily(); |
2465 | |
2466 | auto R = std::make_unique<PathSensitiveBugReport>( |
2467 | args&: *BT_UseFree[*CheckKind], |
2468 | args: AF == AF_InnerBuffer |
2469 | ? "Inner pointer of container used after re/deallocation" |
2470 | : "Use of memory after it is freed" , |
2471 | args&: N); |
2472 | |
2473 | R->markInteresting(sym: Sym); |
2474 | R->addRange(R: Range); |
2475 | R->addVisitor<MallocBugVisitor>(ConstructorArgs&: Sym); |
2476 | |
2477 | if (AF == AF_InnerBuffer) |
2478 | R->addVisitor(visitor: allocation_state::getInnerPointerBRVisitor(Sym)); |
2479 | |
2480 | C.emitReport(R: std::move(R)); |
2481 | } |
2482 | } |
2483 | |
2484 | void MallocChecker::HandleDoubleFree(CheckerContext &C, SourceRange Range, |
2485 | bool Released, SymbolRef Sym, |
2486 | SymbolRef PrevSym) const { |
2487 | |
2488 | if (!ChecksEnabled[CK_MallocChecker] && !ChecksEnabled[CK_NewDeleteChecker]) { |
2489 | C.addSink(); |
2490 | return; |
2491 | } |
2492 | |
2493 | std::optional<MallocChecker::CheckKind> CheckKind = getCheckIfTracked(C, Sym); |
2494 | if (!CheckKind) |
2495 | return; |
2496 | |
2497 | if (ExplodedNode *N = C.generateErrorNode()) { |
2498 | if (!BT_DoubleFree[*CheckKind]) |
2499 | BT_DoubleFree[*CheckKind].reset(p: new BugType( |
2500 | CheckNames[*CheckKind], "Double free" , categories::MemoryError)); |
2501 | |
2502 | auto R = std::make_unique<PathSensitiveBugReport>( |
2503 | args&: *BT_DoubleFree[*CheckKind], |
2504 | args: (Released ? "Attempt to free released memory" |
2505 | : "Attempt to free non-owned memory" ), |
2506 | args&: N); |
2507 | R->addRange(R: Range); |
2508 | R->markInteresting(sym: Sym); |
2509 | if (PrevSym) |
2510 | R->markInteresting(sym: PrevSym); |
2511 | R->addVisitor<MallocBugVisitor>(ConstructorArgs&: Sym); |
2512 | C.emitReport(R: std::move(R)); |
2513 | } |
2514 | } |
2515 | |
2516 | void MallocChecker::HandleDoubleDelete(CheckerContext &C, SymbolRef Sym) const { |
2517 | |
2518 | if (!ChecksEnabled[CK_NewDeleteChecker]) { |
2519 | C.addSink(); |
2520 | return; |
2521 | } |
2522 | |
2523 | std::optional<MallocChecker::CheckKind> CheckKind = getCheckIfTracked(C, Sym); |
2524 | if (!CheckKind) |
2525 | return; |
2526 | |
2527 | if (ExplodedNode *N = C.generateErrorNode()) { |
2528 | if (!BT_DoubleDelete) |
2529 | BT_DoubleDelete.reset(p: new BugType(CheckNames[CK_NewDeleteChecker], |
2530 | "Double delete" , |
2531 | categories::MemoryError)); |
2532 | |
2533 | auto R = std::make_unique<PathSensitiveBugReport>( |
2534 | args&: *BT_DoubleDelete, args: "Attempt to delete released memory" , args&: N); |
2535 | |
2536 | R->markInteresting(sym: Sym); |
2537 | R->addVisitor<MallocBugVisitor>(ConstructorArgs&: Sym); |
2538 | C.emitReport(R: std::move(R)); |
2539 | } |
2540 | } |
2541 | |
2542 | void MallocChecker::HandleUseZeroAlloc(CheckerContext &C, SourceRange Range, |
2543 | SymbolRef Sym) const { |
2544 | |
2545 | if (!ChecksEnabled[CK_MallocChecker] && !ChecksEnabled[CK_NewDeleteChecker]) { |
2546 | C.addSink(); |
2547 | return; |
2548 | } |
2549 | |
2550 | std::optional<MallocChecker::CheckKind> CheckKind = getCheckIfTracked(C, Sym); |
2551 | |
2552 | if (!CheckKind) |
2553 | return; |
2554 | |
2555 | if (ExplodedNode *N = C.generateErrorNode()) { |
2556 | if (!BT_UseZerroAllocated[*CheckKind]) |
2557 | BT_UseZerroAllocated[*CheckKind].reset( |
2558 | p: new BugType(CheckNames[*CheckKind], "Use of zero allocated" , |
2559 | categories::MemoryError)); |
2560 | |
2561 | auto R = std::make_unique<PathSensitiveBugReport>( |
2562 | args&: *BT_UseZerroAllocated[*CheckKind], |
2563 | args: "Use of memory allocated with size zero" , args&: N); |
2564 | |
2565 | R->addRange(R: Range); |
2566 | if (Sym) { |
2567 | R->markInteresting(sym: Sym); |
2568 | R->addVisitor<MallocBugVisitor>(ConstructorArgs&: Sym); |
2569 | } |
2570 | C.emitReport(R: std::move(R)); |
2571 | } |
2572 | } |
2573 | |
2574 | void MallocChecker::HandleFunctionPtrFree(CheckerContext &C, SVal ArgVal, |
2575 | SourceRange Range, |
2576 | const Expr *FreeExpr, |
2577 | AllocationFamily Family) const { |
2578 | if (!ChecksEnabled[CK_MallocChecker]) { |
2579 | C.addSink(); |
2580 | return; |
2581 | } |
2582 | |
2583 | std::optional<MallocChecker::CheckKind> CheckKind = getCheckIfTracked(Family); |
2584 | if (!CheckKind) |
2585 | return; |
2586 | |
2587 | if (ExplodedNode *N = C.generateErrorNode()) { |
2588 | if (!BT_BadFree[*CheckKind]) |
2589 | BT_BadFree[*CheckKind].reset(p: new BugType( |
2590 | CheckNames[*CheckKind], "Bad free" , categories::MemoryError)); |
2591 | |
2592 | SmallString<100> Buf; |
2593 | llvm::raw_svector_ostream Os(Buf); |
2594 | |
2595 | const MemRegion *MR = ArgVal.getAsRegion(); |
2596 | while (const ElementRegion *ER = dyn_cast_or_null<ElementRegion>(Val: MR)) |
2597 | MR = ER->getSuperRegion(); |
2598 | |
2599 | Os << "Argument to " ; |
2600 | if (!printMemFnName(os&: Os, C, E: FreeExpr)) |
2601 | Os << "deallocator" ; |
2602 | |
2603 | Os << " is a function pointer" ; |
2604 | |
2605 | auto R = std::make_unique<PathSensitiveBugReport>(args&: *BT_BadFree[*CheckKind], |
2606 | args: Os.str(), args&: N); |
2607 | R->markInteresting(R: MR); |
2608 | R->addRange(R: Range); |
2609 | C.emitReport(R: std::move(R)); |
2610 | } |
2611 | } |
2612 | |
2613 | ProgramStateRef |
2614 | MallocChecker::ReallocMemAux(CheckerContext &C, const CallEvent &Call, |
2615 | bool ShouldFreeOnFail, ProgramStateRef State, |
2616 | AllocationFamily Family, bool SuffixWithN) const { |
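  // Model the realloc() contract:
  //   realloc(NULL, Size)  behaves like malloc(Size);
  //   realloc(Ptr, 0)      behaves like free(Ptr), and the result is untracked;
  //   realloc(Ptr, Size)   releases 'Ptr' and returns a fresh allocation,
  //                        recording the pair so that a failed reallocation
  //                        can be handled later.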
2617 | if (!State) |
2618 | return nullptr; |
2619 | |
2620 | const CallExpr *CE = cast<CallExpr>(Val: Call.getOriginExpr()); |
2621 | |
2622 | if (SuffixWithN && CE->getNumArgs() < 3) |
2623 | return nullptr; |
2624 | else if (CE->getNumArgs() < 2) |
2625 | return nullptr; |
2626 | |
2627 | const Expr *arg0Expr = CE->getArg(Arg: 0); |
2628 | SVal Arg0Val = C.getSVal(S: arg0Expr); |
2629 | if (!isa<DefinedOrUnknownSVal>(Val: Arg0Val)) |
2630 | return nullptr; |
2631 | DefinedOrUnknownSVal arg0Val = Arg0Val.castAs<DefinedOrUnknownSVal>(); |
2632 | |
2633 | SValBuilder &svalBuilder = C.getSValBuilder(); |
2634 | |
2635 | DefinedOrUnknownSVal PtrEQ = svalBuilder.evalEQ( |
2636 | state: State, lhs: arg0Val, rhs: svalBuilder.makeNullWithType(type: arg0Expr->getType())); |
2637 | |
2638 | // Get the size argument. |
2639 | const Expr *Arg1 = CE->getArg(Arg: 1); |
2640 | |
2641 | // Get the value of the size argument. |
2642 | SVal TotalSize = C.getSVal(S: Arg1); |
2643 | if (SuffixWithN) |
2644 | TotalSize = evalMulForBufferSize(C, Blocks: Arg1, BlockBytes: CE->getArg(Arg: 2)); |
2645 | if (!isa<DefinedOrUnknownSVal>(Val: TotalSize)) |
2646 | return nullptr; |
2647 | |
2648 | // Compare the size argument to 0. |
2649 | DefinedOrUnknownSVal SizeZero = |
2650 | svalBuilder.evalEQ(state: State, lhs: TotalSize.castAs<DefinedOrUnknownSVal>(), |
2651 | rhs: svalBuilder.makeIntValWithWidth( |
2652 | ptrType: svalBuilder.getContext().getSizeType(), integer: 0)); |
2653 | |
2654 | ProgramStateRef StatePtrIsNull, StatePtrNotNull; |
2655 | std::tie(args&: StatePtrIsNull, args&: StatePtrNotNull) = State->assume(Cond: PtrEQ); |
2656 | ProgramStateRef StateSizeIsZero, StateSizeNotZero; |
2657 | std::tie(args&: StateSizeIsZero, args&: StateSizeNotZero) = State->assume(Cond: SizeZero); |
2658 | // We only assume exceptional states if they are definitely true; if the |
2659 | // state is under-constrained, assume regular realloc behavior. |
2660 | bool PrtIsNull = StatePtrIsNull && !StatePtrNotNull; |
2661 | bool SizeIsZero = StateSizeIsZero && !StateSizeNotZero; |
2662 | |
2663 | // If the ptr is NULL and the size is not 0, the call is equivalent to |
2664 | // malloc(size). |
2665 | if (PrtIsNull && !SizeIsZero) { |
2666 | ProgramStateRef stateMalloc = MallocMemAux( |
2667 | C, Call, Size: TotalSize, Init: UndefinedVal(), State: StatePtrIsNull, Family); |
2668 | return stateMalloc; |
2669 | } |
2670 | |
2671 | if (PrtIsNull && SizeIsZero) |
2672 | return State; |
2673 | |
2674 | assert(!PrtIsNull); |
2675 | |
2676 | bool IsKnownToBeAllocated = false; |
2677 | |
2678 | // If the size is 0, free the memory. |
2679 | if (SizeIsZero) |
    // The semantics of the return value are:
    // If size was equal to 0, either NULL or a pointer suitable to be passed
    // to free() is returned. We just free the input pointer and do not add
    // any constraints on the output pointer.
2684 | if (ProgramStateRef stateFree = FreeMemAux( |
2685 | C, Call, State: StateSizeIsZero, Num: 0, Hold: false, IsKnownToBeAllocated, Family)) |
2686 | return stateFree; |
2687 | |
2688 | // Default behavior. |
2689 | if (ProgramStateRef stateFree = |
2690 | FreeMemAux(C, Call, State, Num: 0, Hold: false, IsKnownToBeAllocated, Family)) { |
2691 | |
2692 | ProgramStateRef stateRealloc = |
2693 | MallocMemAux(C, Call, Size: TotalSize, Init: UnknownVal(), State: stateFree, Family); |
2694 | if (!stateRealloc) |
2695 | return nullptr; |
2696 | |
2697 | OwnershipAfterReallocKind Kind = OAR_ToBeFreedAfterFailure; |
2698 | if (ShouldFreeOnFail) |
2699 | Kind = OAR_FreeOnFailure; |
2700 | else if (!IsKnownToBeAllocated) |
2701 | Kind = OAR_DoNotTrackAfterFailure; |
2702 | |
2703 | // Get the from and to pointer symbols as in toPtr = realloc(fromPtr, size). |
2704 | SymbolRef FromPtr = arg0Val.getLocSymbolInBase(); |
2705 | SVal RetVal = C.getSVal(S: CE); |
2706 | SymbolRef ToPtr = RetVal.getAsSymbol(); |
2707 | assert(FromPtr && ToPtr && |
2708 | "By this point, FreeMemAux and MallocMemAux should have checked " |
2709 | "whether the argument or the return value is symbolic!" ); |
2710 | |
2711 | // Record the info about the reallocated symbol so that we could properly |
2712 | // process failed reallocation. |
2713 | stateRealloc = stateRealloc->set<ReallocPairs>(K: ToPtr, |
2714 | E: ReallocPair(FromPtr, Kind)); |
2715 | // The reallocated symbol should stay alive for as long as the new symbol. |
2716 | C.getSymbolManager().addSymbolDependency(Primary: ToPtr, Dependent: FromPtr); |
2717 | return stateRealloc; |
2718 | } |
2719 | return nullptr; |
2720 | } |
2721 | |
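// CallocMem models calloc(nmemb, size): the allocation is nmemb * size bytes
// and the returned buffer is zero-initialized, e.g. (illustrative):
//   int *p = (int *)calloc(4, sizeof(int)); // 4 * sizeof(int) zeroed bytes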
ProgramStateRef MallocChecker::CallocMem(CheckerContext &C,
                                         const CallEvent &Call,
                                         ProgramStateRef State) const {
  if (!State)
    return nullptr;

  if (Call.getNumArgs() < 2)
    return nullptr;

  SValBuilder &svalBuilder = C.getSValBuilder();
  SVal zeroVal = svalBuilder.makeZeroVal(svalBuilder.getContext().CharTy);
  SVal TotalSize =
      evalMulForBufferSize(C, Call.getArgExpr(0), Call.getArgExpr(1));

  return MallocMemAux(C, Call, TotalSize, zeroVal, State, AF_Malloc);
}

MallocChecker::LeakInfo MallocChecker::getAllocationSite(const ExplodedNode *N,
                                                         SymbolRef Sym,
                                                         CheckerContext &C) {
  const LocationContext *LeakContext = N->getLocationContext();
  // Walk the ExplodedGraph backwards and find the first node that referred to
  // the tracked symbol.
  const ExplodedNode *AllocNode = N;
  const MemRegion *ReferenceRegion = nullptr;

  while (N) {
    ProgramStateRef State = N->getState();
    if (!State->get<RegionState>(Sym))
      break;

    // Find the most recent expression bound to the symbol in the current
    // context.
    if (!ReferenceRegion) {
      if (const MemRegion *MR = C.getLocationRegionIfPostStore(N)) {
        SVal Val = State->getSVal(MR);
        if (Val.getAsLocSymbol() == Sym) {
          const VarRegion *VR = MR->getBaseRegion()->getAs<VarRegion>();
          // Do not show local variables belonging to a function other than
          // where the error is reported.
          if (!VR || (VR->getStackFrame() == LeakContext->getStackFrame()))
            ReferenceRegion = MR;
        }
      }
    }

    // The allocation node is the last node in the current or parent context
    // in which the symbol was tracked.
    const LocationContext *NContext = N->getLocationContext();
    if (NContext == LeakContext || NContext->isParentOf(LeakContext))
      AllocNode = N;
    N = N->pred_empty() ? nullptr : *(N->pred_begin());
  }

  return LeakInfo(AllocNode, ReferenceRegion);
}

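// HandleLeak emits the "Potential leak" report, uniqued by the allocation site
// found above. A minimal case it would flag (illustrative):
//   void f() {
//     int *p = (int *)malloc(sizeof(int));
//   } // 'p' goes out of scope; the memory is never freed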
void MallocChecker::HandleLeak(SymbolRef Sym, ExplodedNode *N,
                               CheckerContext &C) const {

  if (!ChecksEnabled[CK_MallocChecker] &&
      !ChecksEnabled[CK_NewDeleteLeaksChecker])
    return;

  const RefState *RS = C.getState()->get<RegionState>(Sym);
  assert(RS && "cannot leak an untracked symbol");
  AllocationFamily Family = RS->getAllocationFamily();

  if (Family == AF_Alloca)
    return;

  std::optional<MallocChecker::CheckKind> CheckKind =
      getCheckIfTracked(Family, true);

  if (!CheckKind)
    return;

  assert(N);
  if (!BT_Leak[*CheckKind]) {
    // Leaks should not be reported if they are post-dominated by a sink:
    // (1) Sinks are higher importance bugs.
    // (2) NoReturnFunctionChecker uses sink nodes to represent paths ending
    //     with __noreturn functions such as assert() or exit(). We choose not
    //     to report leaks on such paths.
    BT_Leak[*CheckKind].reset(new BugType(CheckNames[*CheckKind], "Memory leak",
                                          categories::MemoryError,
                                          /*SuppressOnSink=*/true));
  }

  // Most bug reports are cached at the location where they occurred.
  // With leaks, we want to unique them by the location where they were
  // allocated, and only report a single path.
  PathDiagnosticLocation LocUsedForUniqueing;
  const ExplodedNode *AllocNode = nullptr;
  const MemRegion *Region = nullptr;
  std::tie(AllocNode, Region) = getAllocationSite(N, Sym, C);

  const Stmt *AllocationStmt = AllocNode->getStmtForDiagnostics();
  if (AllocationStmt)
    LocUsedForUniqueing = PathDiagnosticLocation::createBegin(
        AllocationStmt, C.getSourceManager(), AllocNode->getLocationContext());

  SmallString<200> buf;
  llvm::raw_svector_ostream os(buf);
  if (Region && Region->canPrintPretty()) {
    os << "Potential leak of memory pointed to by ";
    Region->printPretty(os);
  } else {
    os << "Potential memory leak";
  }

  auto R = std::make_unique<PathSensitiveBugReport>(
      *BT_Leak[*CheckKind], os.str(), N, LocUsedForUniqueing,
      AllocNode->getLocationContext()->getDecl());
  R->markInteresting(Sym);
  R->addVisitor<MallocBugVisitor>(Sym, true);
  if (ShouldRegisterNoOwnershipChangeVisitor)
    R->addVisitor<NoMemOwnershipChangeVisitor>(Sym, this);
  C.emitReport(std::move(R));
}

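// checkDeadSymbols: when tracked symbols die, anything still in an allocated
// state is reported as a leak, and stale entries are dropped from the
// RegionState, ReallocPairs and FreeReturnValue maps.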
void MallocChecker::checkDeadSymbols(SymbolReaper &SymReaper,
                                     CheckerContext &C) const {
  ProgramStateRef state = C.getState();
  RegionStateTy OldRS = state->get<RegionState>();
  RegionStateTy::Factory &F = state->get_context<RegionState>();

  RegionStateTy RS = OldRS;
  SmallVector<SymbolRef, 2> Errors;
  for (auto [Sym, State] : RS) {
    if (SymReaper.isDead(Sym)) {
      if (State.isAllocated() || State.isAllocatedOfSizeZero())
        Errors.push_back(Sym);
      // Remove the dead symbol from the map.
      RS = F.remove(RS, Sym);
    }
  }

  if (RS == OldRS) {
    // We shouldn't have touched other maps yet.
    assert(state->get<ReallocPairs>() ==
           C.getState()->get<ReallocPairs>());
    assert(state->get<FreeReturnValue>() ==
           C.getState()->get<FreeReturnValue>());
    return;
  }

  // Cleanup the Realloc Pairs Map.
  ReallocPairsTy RP = state->get<ReallocPairs>();
  for (auto [Sym, ReallocPair] : RP) {
    if (SymReaper.isDead(Sym) || SymReaper.isDead(ReallocPair.ReallocatedSym)) {
      state = state->remove<ReallocPairs>(Sym);
    }
  }

  // Cleanup the FreeReturnValue Map.
  FreeReturnValueTy FR = state->get<FreeReturnValue>();
  for (auto [Sym, RetSym] : FR) {
    if (SymReaper.isDead(Sym) || SymReaper.isDead(RetSym)) {
      state = state->remove<FreeReturnValue>(Sym);
    }
  }

  // Generate leak node.
  ExplodedNode *N = C.getPredecessor();
  if (!Errors.empty()) {
    static CheckerProgramPointTag Tag("MallocChecker", "DeadSymbolsLeak");
    N = C.generateNonFatalErrorNode(C.getState(), &Tag);
    if (N) {
      for (SymbolRef Sym : Errors) {
        HandleLeak(Sym, N, C);
      }
    }
  }

  C.addTransition(state->set<RegionState>(RS), N);
}

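// Pre-call checks: model 'delete' / 'delete[]', catch double-delete in
// destructors, run per-function pre-handlers (e.g. for getline), and flag
// released memory used as 'this' or passed as an argument, e.g.
// (illustrative, 'use' is a hypothetical callee):
//   free(p);
//   use(p);   // warning: 'p' is used after it was freed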
void MallocChecker::checkPreCall(const CallEvent &Call,
                                 CheckerContext &C) const {

  if (const auto *DC = dyn_cast<CXXDeallocatorCall>(&Call)) {
    const CXXDeleteExpr *DE = DC->getOriginExpr();

    if (!ChecksEnabled[CK_NewDeleteChecker])
      if (SymbolRef Sym = C.getSVal(DE->getArgument()).getAsSymbol())
        checkUseAfterFree(Sym, C, DE->getArgument());

    if (!isStandardNewDelete(DC->getDecl()))
      return;

    ProgramStateRef State = C.getState();
    bool IsKnownToBeAllocated;
    State = FreeMemAux(C, DE->getArgument(), Call, State,
                       /*Hold*/ false, IsKnownToBeAllocated,
                       (DE->isArrayForm() ? AF_CXXNewArray : AF_CXXNew));

    C.addTransition(State);
    return;
  }

  if (const auto *DC = dyn_cast<CXXDestructorCall>(&Call)) {
    SymbolRef Sym = DC->getCXXThisVal().getAsSymbol();
    if (!Sym || checkDoubleDelete(Sym, C))
      return;
  }

  // We need to handle getline pre-conditions here, before the pointed region
  // gets invalidated by StreamChecker.
  if (const auto *PreFN = PreFnMap.lookup(Call)) {
    (*PreFN)(this, Call, C);
    return;
  }

  // We will check for double free in the post visit.
  if (const AnyFunctionCall *FC = dyn_cast<AnyFunctionCall>(&Call)) {
    const FunctionDecl *FD = FC->getDecl();
    if (!FD)
      return;

    if (ChecksEnabled[CK_MallocChecker] && isFreeingCall(Call))
      return;
  }

  // Check whether the object a method is called on has already been deleted.
  if (const CXXInstanceCall *CC = dyn_cast<CXXInstanceCall>(&Call)) {
    SymbolRef Sym = CC->getCXXThisVal().getAsSymbol();
    if (!Sym || checkUseAfterFree(Sym, C, CC->getCXXThisExpr()))
      return;
  }

  // Check arguments for being used after free.
  for (unsigned I = 0, E = Call.getNumArgs(); I != E; ++I) {
    SVal ArgSVal = Call.getArgSVal(I);
    if (isa<Loc>(ArgSVal)) {
      SymbolRef Sym = ArgSVal.getAsSymbol();
      if (!Sym)
        continue;
      if (checkUseAfterFree(Sym, C, Call.getArgExpr(I)))
        return;
    }
  }
}

void MallocChecker::checkPreStmt(const ReturnStmt *S,
                                 CheckerContext &C) const {
  checkEscapeOnReturn(S, C);
}

// In the CFG, automatic destructors come after the return statement.
// This callback checks for returning memory that is freed by automatic
// destructors, as those cannot be reached in checkPreStmt().
void MallocChecker::checkEndFunction(const ReturnStmt *S,
                                     CheckerContext &C) const {
  checkEscapeOnReturn(S, C);
}

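// Returning freed memory is reported as a use-after-free, e.g. (illustrative):
//   int *g() {
//     int *p = (int *)malloc(4);
//     free(p);
//     return p;   // warning: use after free
//   }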
void MallocChecker::checkEscapeOnReturn(const ReturnStmt *S,
                                        CheckerContext &C) const {
  if (!S)
    return;

  const Expr *E = S->getRetValue();
  if (!E)
    return;

  // Check if we are returning a symbol.
  ProgramStateRef State = C.getState();
  SVal RetVal = C.getSVal(E);
  SymbolRef Sym = RetVal.getAsSymbol();
  if (!Sym)
    // If we are returning a field of the allocated struct or an array element,
    // the callee could still free the memory.
    // TODO: This logic should be a part of the generic symbol escape callback.
    if (const MemRegion *MR = RetVal.getAsRegion())
      if (isa<FieldRegion, ElementRegion>(MR))
        if (const SymbolicRegion *BMR =
                dyn_cast<SymbolicRegion>(MR->getBaseRegion()))
          Sym = BMR->getSymbol();

  // Check if we are returning freed memory.
  if (Sym)
    checkUseAfterFree(Sym, C, E);
}

// TODO: Blocks should be either inlined or should call invalidate regions
// upon invocation. After that's in place, special casing here will not be
// needed.
void MallocChecker::checkPostStmt(const BlockExpr *BE,
                                  CheckerContext &C) const {

  // Scan the BlockDeclRefExprs for any object the retain count checker
  // may be tracking.
  if (!BE->getBlockDecl()->hasCaptures())
    return;

  ProgramStateRef state = C.getState();
  const BlockDataRegion *R =
      cast<BlockDataRegion>(C.getSVal(BE).getAsRegion());

  auto ReferencedVars = R->referenced_vars();
  if (ReferencedVars.empty())
    return;

  SmallVector<const MemRegion *, 10> Regions;
  const LocationContext *LC = C.getLocationContext();
  MemRegionManager &MemMgr = C.getSValBuilder().getRegionManager();

  for (const auto &Var : ReferencedVars) {
    const VarRegion *VR = Var.getCapturedRegion();
    if (VR->getSuperRegion() == R) {
      VR = MemMgr.getVarRegion(VR->getDecl(), LC);
    }
    Regions.push_back(VR);
  }

  state =
      state->scanReachableSymbols<StopTrackingCallback>(Regions).getState();
  C.addTransition(state);
}

static bool isReleased(SymbolRef Sym, CheckerContext &C) {
  assert(Sym);
  const RefState *RS = C.getState()->get<RegionState>(Sym);
  return (RS && RS->isReleased());
}

bool MallocChecker::suppressDeallocationsInSuspiciousContexts(
    const CallEvent &Call, CheckerContext &C) const {
  if (Call.getNumArgs() == 0)
    return false;

  StringRef FunctionStr = "";
  if (const auto *FD = dyn_cast<FunctionDecl>(C.getStackFrame()->getDecl()))
    if (const Stmt *Body = FD->getBody())
      if (Body->getBeginLoc().isValid())
        FunctionStr =
            Lexer::getSourceText(CharSourceRange::getTokenRange(
                                     {FD->getBeginLoc(), Body->getBeginLoc()}),
                                 C.getSourceManager(), C.getLangOpts());

  // We do not model the Integer Set Library's retain-count based allocation.
  if (!FunctionStr.contains("__isl_"))
    return false;

  ProgramStateRef State = C.getState();

  for (const Expr *Arg : cast<CallExpr>(Call.getOriginExpr())->arguments())
    if (SymbolRef Sym = C.getSVal(Arg).getAsSymbol())
      if (const RefState *RS = State->get<RegionState>(Sym))
        State = State->set<RegionState>(Sym, RefState::getEscaped(RS));

  C.addTransition(State);
  return true;
}

bool MallocChecker::checkUseAfterFree(SymbolRef Sym, CheckerContext &C,
                                      const Stmt *S) const {

  if (isReleased(Sym, C)) {
    HandleUseAfterFree(C, S->getSourceRange(), Sym);
    return true;
  }

  return false;
}

void MallocChecker::checkUseZeroAllocated(SymbolRef Sym, CheckerContext &C,
                                          const Stmt *S) const {
  assert(Sym);

  if (const RefState *RS = C.getState()->get<RegionState>(Sym)) {
    if (RS->isAllocatedOfSizeZero())
      HandleUseZeroAlloc(C, RS->getStmt()->getSourceRange(), Sym);
  } else if (C.getState()->contains<ReallocSizeZeroSymbols>(Sym)) {
    HandleUseZeroAlloc(C, S->getSourceRange(), Sym);
  }
}

bool MallocChecker::checkDoubleDelete(SymbolRef Sym, CheckerContext &C) const {

  if (isReleased(Sym, C)) {
    HandleDoubleDelete(C, Sym);
    return true;
  }
  return false;
}

// Check if the location is a freed symbolic region.
void MallocChecker::checkLocation(SVal l, bool isLoad, const Stmt *S,
                                  CheckerContext &C) const {
  SymbolRef Sym = l.getLocSymbolInBase();
  if (Sym) {
    checkUseAfterFree(Sym, C, S);
    checkUseZeroAllocated(Sym, C, S);
  }
}

// If a symbolic region is assumed to be NULL (or another constant), stop
// tracking it - assuming that allocation failed on this path.
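// For example (illustrative):
//   int *p = (int *)malloc(4);
//   if (!p)
//     return;   // on this branch the allocation failed; 'p' is not a leak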
ProgramStateRef MallocChecker::evalAssume(ProgramStateRef state,
                                          SVal Cond,
                                          bool Assumption) const {
  RegionStateTy RS = state->get<RegionState>();
  for (SymbolRef Sym : llvm::make_first_range(RS)) {
    // If the symbol is assumed to be NULL, remove it from consideration.
    ConstraintManager &CMgr = state->getConstraintManager();
    ConditionTruthVal AllocFailed = CMgr.isNull(state, Sym);
    if (AllocFailed.isConstrainedTrue())
      state = state->remove<RegionState>(Sym);
  }

  // Realloc returns 0 when reallocation fails, which means that we should
  // restore the state of the pointer being reallocated.
  ReallocPairsTy RP = state->get<ReallocPairs>();
  for (auto [Sym, ReallocPair] : RP) {
    // If the symbol is assumed to be NULL, remove it from consideration.
    ConstraintManager &CMgr = state->getConstraintManager();
    ConditionTruthVal AllocFailed = CMgr.isNull(state, Sym);
    if (!AllocFailed.isConstrainedTrue())
      continue;

    SymbolRef ReallocSym = ReallocPair.ReallocatedSym;
    if (const RefState *RS = state->get<RegionState>(ReallocSym)) {
      if (RS->isReleased()) {
        switch (ReallocPair.Kind) {
        case OAR_ToBeFreedAfterFailure:
          state = state->set<RegionState>(
              ReallocSym,
              RefState::getAllocated(RS->getAllocationFamily(), RS->getStmt()));
          break;
        case OAR_DoNotTrackAfterFailure:
          state = state->remove<RegionState>(ReallocSym);
          break;
        default:
          assert(ReallocPair.Kind == OAR_FreeOnFailure);
        }
      }
    }
    state = state->remove<ReallocPairs>(Sym);
  }

  return state;
}

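// Heuristic used by the pointer-escape callbacks below: decide whether a call
// might free (or take ownership of) escaped memory. It errs on the side of
// returning true when the callee cannot be classified.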
bool MallocChecker::mayFreeAnyEscapedMemoryOrIsModeledExplicitly(
    const CallEvent *Call, ProgramStateRef State,
    SymbolRef &EscapingSymbol) const {
  assert(Call);
  EscapingSymbol = nullptr;

  // For now, assume that any C++ or block call can free memory.
  // TODO: If we want to be more optimistic here, we'll need to make sure that
  // regions escape to C++ containers. They seem to do that even now, but for
  // mysterious reasons.
  if (!isa<SimpleFunctionCall, ObjCMethodCall>(Call))
    return true;

  // Check Objective-C messages by selector name.
  if (const ObjCMethodCall *Msg = dyn_cast<ObjCMethodCall>(Call)) {
    // If it's not a framework call, or if it takes a callback, assume it
    // can free memory.
    if (!Call->isInSystemHeader() || Call->argumentsMayEscape())
      return true;

    // If it's a method we know about, handle it explicitly post-call.
    // This should happen before the "freeWhenDone" check below.
    if (isKnownDeallocObjCMethodName(*Msg))
      return false;

    // If there's a "freeWhenDone" parameter, but the method isn't one we know
    // about, we can't be sure that the object will use free() to deallocate
    // the memory, so we can't model it explicitly. The best we can do is use
    // it to decide whether the pointer escapes.
    if (std::optional<bool> FreeWhenDone = getFreeWhenDoneArg(*Msg))
      return *FreeWhenDone;

    // If the first selector piece ends with "NoCopy", and there is no
    // "freeWhenDone" parameter set to zero, we know ownership is being
    // transferred. Again, though, we can't be sure that the object will use
    // free() to deallocate the memory, so we can't model it explicitly.
    StringRef FirstSlot = Msg->getSelector().getNameForSlot(0);
    if (FirstSlot.ends_with("NoCopy"))
      return true;

    // If the first selector starts with addPointer, insertPointer,
    // or replacePointer, assume we are dealing with NSPointerArray or similar.
    // This is similar to C++ containers (vector); we still might want to check
    // that the pointers get freed by following the container itself.
    if (FirstSlot.starts_with("addPointer") ||
        FirstSlot.starts_with("insertPointer") ||
        FirstSlot.starts_with("replacePointer") ||
        FirstSlot == "valueWithPointer") {
      return true;
    }

    // We should escape the receiver on a call to 'init'. This is especially
    // relevant to the receiver, as the corresponding symbol is usually not
    // referenced after the call.
    if (Msg->getMethodFamily() == OMF_init) {
      EscapingSymbol = Msg->getReceiverSVal().getAsSymbol();
      return true;
    }

    // Otherwise, assume that the method does not free memory.
    // Most framework methods do not free memory.
    return false;
  }

  // At this point the only thing left to handle is straight function calls.
  const FunctionDecl *FD = cast<SimpleFunctionCall>(Call)->getDecl();
  if (!FD)
    return true;

  // If it's one of the allocation functions we can reason about, we model
  // its behavior explicitly.
  if (isMemCall(*Call))
    return false;

  // If it's not a system call, assume it frees memory.
  if (!Call->isInSystemHeader())
    return true;

  // White list the system functions whose arguments escape.
  const IdentifierInfo *II = FD->getIdentifier();
  if (!II)
    return true;
  StringRef FName = II->getName();

  // White list the 'XXXNoCopy' CoreFoundation functions.
  // We specifically check these before the more generic checks below.
  if (FName.ends_with("NoCopy")) {
    // Look for the deallocator argument. We know that the memory ownership
    // is not transferred only if the deallocator argument is
    // 'kCFAllocatorNull'.
    for (unsigned i = 1; i < Call->getNumArgs(); ++i) {
      const Expr *ArgE = Call->getArgExpr(i)->IgnoreParenCasts();
      if (const DeclRefExpr *DE = dyn_cast<DeclRefExpr>(ArgE)) {
        StringRef DeallocatorName = DE->getFoundDecl()->getName();
        if (DeallocatorName == "kCFAllocatorNull")
          return false;
      }
    }
    return true;
  }

  // Associating streams with malloced buffers. The pointer can escape if
  // 'closefn' is specified (and if that function does free memory),
  // but it will not if closefn is not specified.
  // Currently, we do not inspect the 'closefn' function (PR12101).
  if (FName == "funopen")
    if (Call->getNumArgs() >= 4 && Call->getArgSVal(4).isConstant(0))
      return false;

  // Do not warn on pointers passed to 'setbuf' when used with std streams;
  // these leaks might be intentional when setting the buffer for stdio.
  // http://stackoverflow.com/questions/2671151/who-frees-setvbuf-buffer
  if (FName == "setbuf" || FName == "setbuffer" ||
      FName == "setlinebuf" || FName == "setvbuf") {
    if (Call->getNumArgs() >= 1) {
      const Expr *ArgE = Call->getArgExpr(0)->IgnoreParenCasts();
      if (const DeclRefExpr *ArgDRE = dyn_cast<DeclRefExpr>(ArgE))
        if (const VarDecl *D = dyn_cast<VarDecl>(ArgDRE->getDecl()))
          if (D->getCanonicalDecl()->getName().contains("std"))
            return true;
    }
  }

  // A bunch of other functions which either take ownership of a pointer or
  // wrap the result up in a struct or object, meaning it can be freed later.
  // (See RetainCountChecker.) Not all the parameters here are invalidated,
  // but the Malloc checker cannot differentiate between them. The right way
  // of doing this would be to implement a pointer escapes callback.
  if (FName == "CGBitmapContextCreate" ||
      FName == "CGBitmapContextCreateWithData" ||
      FName == "CVPixelBufferCreateWithBytes" ||
      FName == "CVPixelBufferCreateWithPlanarBytes" ||
      FName == "OSAtomicEnqueue") {
    return true;
  }

  if (FName == "postEvent" &&
      FD->getQualifiedNameAsString() == "QCoreApplication::postEvent") {
    return true;
  }

  if (FName == "connectImpl" &&
      FD->getQualifiedNameAsString() == "QObject::connectImpl") {
    return true;
  }

  if (FName == "singleShotImpl" &&
      FD->getQualifiedNameAsString() == "QTimer::singleShotImpl") {
    return true;
  }

  // Handle cases where we know a buffer's /address/ can escape.
  // Note that the above checks handle some special cases where we know that
  // even though the address escapes, it's still our responsibility to free the
  // buffer.
  if (Call->argumentsMayEscape())
    return true;

  // Otherwise, assume that the function does not free memory.
  // Most system calls do not free the memory.
  return false;
}

ProgramStateRef
MallocChecker::checkPointerEscape(ProgramStateRef State,
                                  const InvalidatedSymbols &Escaped,
                                  const CallEvent *Call,
                                  PointerEscapeKind Kind) const {
  return checkPointerEscapeAux(State, Escaped, Call, Kind,
                               /*IsConstPointerEscape*/ false);
}

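// A const pointer escape still allows the callee to delete new-allocated
// memory, e.g. (illustrative):
//   void sink(const int *p) { delete p; }  // 'delete' accepts a const T *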
ProgramStateRef
MallocChecker::checkConstPointerEscape(ProgramStateRef State,
                                       const InvalidatedSymbols &Escaped,
                                       const CallEvent *Call,
                                       PointerEscapeKind Kind) const {
  // If a const pointer escapes, it may not be freed(), but it could be
  // deleted.
  return checkPointerEscapeAux(State, Escaped, Call, Kind,
                               /*IsConstPointerEscape*/ true);
}

static bool checkIfNewOrNewArrayFamily(const RefState *RS) {
  return (RS->getAllocationFamily() == AF_CXXNewArray ||
          RS->getAllocationFamily() == AF_CXXNew);
}

ProgramStateRef MallocChecker::checkPointerEscapeAux(
    ProgramStateRef State, const InvalidatedSymbols &Escaped,
    const CallEvent *Call, PointerEscapeKind Kind,
    bool IsConstPointerEscape) const {
  // If we know that the call does not free memory, or we want to process the
  // call later, keep tracking the top level arguments.
  SymbolRef EscapingSymbol = nullptr;
  if (Kind == PSK_DirectEscapeOnCall &&
      !mayFreeAnyEscapedMemoryOrIsModeledExplicitly(Call, State,
                                                    EscapingSymbol) &&
      !EscapingSymbol) {
    return State;
  }

  for (SymbolRef sym : Escaped) {
    if (EscapingSymbol && EscapingSymbol != sym)
      continue;

    if (const RefState *RS = State->get<RegionState>(sym))
      if (RS->isAllocated() || RS->isAllocatedOfSizeZero())
        if (!IsConstPointerEscape || checkIfNewOrNewArrayFamily(RS))
          State = State->set<RegionState>(sym, RefState::getEscaped(RS));
  }
  return State;
}

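// In the Linux kernel, a zero-byte allocation may return the special
// ZERO_SIZE_PTR constant rather than NULL; this helper recognizes arguments
// that are known to equal that macro's value.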
bool MallocChecker::isArgZERO_SIZE_PTR(ProgramStateRef State, CheckerContext &C,
                                       SVal ArgVal) const {
  if (!KernelZeroSizePtrValue)
    KernelZeroSizePtrValue =
        tryExpandAsInteger("ZERO_SIZE_PTR", C.getPreprocessor());

  const llvm::APSInt *ArgValKnown =
      C.getSValBuilder().getKnownValue(State, ArgVal);
  return ArgValKnown && *KernelZeroSizePtrValue &&
         ArgValKnown->getSExtValue() == **KernelZeroSizePtrValue;
}

static SymbolRef findFailedReallocSymbol(ProgramStateRef currState,
                                         ProgramStateRef prevState) {
  ReallocPairsTy currMap = currState->get<ReallocPairs>();
  ReallocPairsTy prevMap = prevState->get<ReallocPairs>();

  for (const ReallocPairsTy::value_type &Pair : prevMap) {
    SymbolRef sym = Pair.first;
    if (!currMap.lookup(sym))
      return sym;
  }

  return nullptr;
}

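// Heuristically matches destructors of reference-counting smart pointers by
// class name, e.g. names like "RefPtr", "SharedPointer", "IntrusivePtr" or
// "FooRC" (illustrative; see the substring checks below).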
static bool isReferenceCountingPointerDestructor(const CXXDestructorDecl *DD) {
  if (const IdentifierInfo *II = DD->getParent()->getIdentifier()) {
    StringRef N = II->getName();
    if (N.contains_insensitive("ptr") || N.contains_insensitive("pointer")) {
      if (N.contains_insensitive("ref") || N.contains_insensitive("cnt") ||
          N.contains_insensitive("intrusive") ||
          N.contains_insensitive("shared") || N.ends_with_insensitive("rc")) {
        return true;
      }
    }
  }
  return false;
}

PathDiagnosticPieceRef MallocBugVisitor::VisitNode(const ExplodedNode *N,
                                                   BugReporterContext &BRC,
                                                   PathSensitiveBugReport &BR) {
  ProgramStateRef state = N->getState();
  ProgramStateRef statePrev = N->getFirstPred()->getState();

  const RefState *RSCurr = state->get<RegionState>(Sym);
  const RefState *RSPrev = statePrev->get<RegionState>(Sym);

  const Stmt *S = N->getStmtForDiagnostics();
  // When dealing with containers, we sometimes want to give a note
  // even if the statement is missing.
  if (!S && (!RSCurr || RSCurr->getAllocationFamily() != AF_InnerBuffer))
    return nullptr;

  const LocationContext *CurrentLC = N->getLocationContext();

  // If we find an atomic fetch_add or fetch_sub within the destructor in which
  // the pointer was released (before the release), this is likely a destructor
  // of a shared pointer.
  // Because we don't model atomics, and also because we don't know that the
  // original reference count is positive, we should not report use-after-frees
  // on objects deleted in such destructors. This can probably be improved
  // through better shared pointer modeling.
  if (ReleaseDestructorLC && (ReleaseDestructorLC == CurrentLC ||
                              ReleaseDestructorLC->isParentOf(CurrentLC))) {
    if (const auto *AE = dyn_cast<AtomicExpr>(S)) {
      // Check for manual use of atomic builtins.
      AtomicExpr::AtomicOp Op = AE->getOp();
      if (Op == AtomicExpr::AO__c11_atomic_fetch_add ||
          Op == AtomicExpr::AO__c11_atomic_fetch_sub) {
        BR.markInvalid(getTag(), S);
      }
    } else if (const auto *CE = dyn_cast<CallExpr>(S)) {
      // Check for `std::atomic` and such. This covers both regular method
      // calls and operator calls.
      if (const auto *MD =
              dyn_cast_or_null<CXXMethodDecl>(CE->getDirectCallee())) {
        const CXXRecordDecl *RD = MD->getParent();
        // A bit wobbly with ".contains()" because it may be like
        // "__atomic_base" or something.
        if (StringRef(RD->getNameAsString()).contains("atomic")) {
          BR.markInvalid(getTag(), S);
        }
      }
    }
  }

  // FIXME: We will eventually need to handle non-statement-based events
  // (__attribute__((cleanup))).

  // Find out if this is an interesting point and what is the kind.
  StringRef Msg;
  std::unique_ptr<StackHintGeneratorForSymbol> StackHint = nullptr;
  SmallString<256> Buf;
  llvm::raw_svector_ostream OS(Buf);

  if (Mode == Normal) {
    if (isAllocated(RSCurr, RSPrev, S)) {
      Msg = "Memory is allocated";
      StackHint = std::make_unique<StackHintGeneratorForSymbol>(
          Sym, "Returned allocated memory");
    } else if (isReleased(RSCurr, RSPrev, S)) {
      const auto Family = RSCurr->getAllocationFamily();
      switch (Family) {
      case AF_Alloca:
      case AF_Malloc:
      case AF_CXXNew:
      case AF_CXXNewArray:
      case AF_IfNameIndex:
        Msg = "Memory is released";
        StackHint = std::make_unique<StackHintGeneratorForSymbol>(
            Sym, "Returning; memory was released");
        break;
      case AF_InnerBuffer: {
        const MemRegion *ObjRegion =
            allocation_state::getContainerObjRegion(statePrev, Sym);
        const auto *TypedRegion = cast<TypedValueRegion>(ObjRegion);
        QualType ObjTy = TypedRegion->getValueType();
        OS << "Inner buffer of '" << ObjTy << "' ";

        if (N->getLocation().getKind() == ProgramPoint::PostImplicitCallKind) {
          OS << "deallocated by call to destructor";
          StackHint = std::make_unique<StackHintGeneratorForSymbol>(
              Sym, "Returning; inner buffer was deallocated");
        } else {
          OS << "reallocated by call to '";
          const Stmt *S = RSCurr->getStmt();
          if (const auto *MemCallE = dyn_cast<CXXMemberCallExpr>(S)) {
            OS << MemCallE->getMethodDecl()->getDeclName();
          } else if (const auto *OpCallE = dyn_cast<CXXOperatorCallExpr>(S)) {
            OS << OpCallE->getDirectCallee()->getDeclName();
          } else if (const auto *CallE = dyn_cast<CallExpr>(S)) {
            auto &CEMgr = BRC.getStateManager().getCallEventManager();
            CallEventRef<> Call =
                CEMgr.getSimpleCall(CallE, state, CurrentLC, {nullptr, 0});
            if (const auto *D = dyn_cast_or_null<NamedDecl>(Call->getDecl()))
              OS << D->getDeclName();
            else
              OS << "unknown";
          }
          OS << "'";
          StackHint = std::make_unique<StackHintGeneratorForSymbol>(
              Sym, "Returning; inner buffer was reallocated");
        }
        Msg = OS.str();
        break;
      }
      case AF_None:
        llvm_unreachable("Unhandled allocation family!");
      }

      // See if we're releasing memory while inlining a destructor
      // (or one of its callees). This turns on various common
      // false positive suppressions.
      bool FoundAnyDestructor = false;
      for (const LocationContext *LC = CurrentLC; LC; LC = LC->getParent()) {
        if (const auto *DD = dyn_cast<CXXDestructorDecl>(LC->getDecl())) {
          if (isReferenceCountingPointerDestructor(DD)) {
            // This immediately looks like a reference-counting destructor.
            // We're bad at guessing the original reference count of the
            // object, so suppress the report for now.
            BR.markInvalid(getTag(), DD);
          } else if (!FoundAnyDestructor) {
            assert(!ReleaseDestructorLC &&
                   "There can be only one release point!");
            // Suspect that it's a reference counting pointer destructor.
            // On one of the next nodes we might find out that it has atomic
            // reference counting operations within it (see the code above),
            // and if so, we'd conclude that it likely is a reference counting
            // pointer destructor.
            ReleaseDestructorLC = LC->getStackFrame();
            // It is unlikely that releasing memory is delegated to a
            // destructor inside a destructor of a shared pointer, because
            // it's fairly hard to pass the information that the pointer
            // indeed needs to be released into it. So we're only interested
            // in the innermost destructor.
            FoundAnyDestructor = true;
          }
        }
      }
    } else if (isRelinquished(RSCurr, RSPrev, S)) {
      Msg = "Memory ownership is transferred";
      StackHint = std::make_unique<StackHintGeneratorForSymbol>(Sym, "");
    } else if (hasReallocFailed(RSCurr, RSPrev, S)) {
      Mode = ReallocationFailed;
      Msg = "Reallocation failed";
      StackHint = std::make_unique<StackHintGeneratorForReallocationFailed>(
          Sym, "Reallocation failed");

      if (SymbolRef sym = findFailedReallocSymbol(state, statePrev)) {
        // Is it possible to fail two reallocs WITHOUT testing in between?
        assert((!FailedReallocSymbol || FailedReallocSymbol == sym) &&
               "We only support one failed realloc at a time.");
        BR.markInteresting(sym);
        FailedReallocSymbol = sym;
      }
    }

    // We are in a special mode if a reallocation failed later in the path.
  } else if (Mode == ReallocationFailed) {
    assert(FailedReallocSymbol && "No symbol to look for.");

    // Is this the first appearance of the reallocated symbol?
    if (!statePrev->get<RegionState>(FailedReallocSymbol)) {
      // We're at the reallocation point.
      Msg = "Attempt to reallocate memory";
      StackHint = std::make_unique<StackHintGeneratorForSymbol>(
          Sym, "Returned reallocated memory");
      FailedReallocSymbol = nullptr;
      Mode = Normal;
    }
  }

  if (Msg.empty()) {
    assert(!StackHint);
    return nullptr;
  }

  assert(StackHint);

  // Generate the extra diagnostic.
  PathDiagnosticLocation Pos;
  if (!S) {
    assert(RSCurr->getAllocationFamily() == AF_InnerBuffer);
    auto PostImplCall = N->getLocation().getAs<PostImplicitCall>();
    if (!PostImplCall)
      return nullptr;
    Pos = PathDiagnosticLocation(PostImplCall->getLocation(),
                                 BRC.getSourceManager());
  } else {
    Pos = PathDiagnosticLocation(S, BRC.getSourceManager(),
                                 N->getLocationContext());
  }

  auto P = std::make_shared<PathDiagnosticEventPiece>(Pos, Msg, true);
  BR.addCallStackHint(P, std::move(StackHint));
  return P;
}

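// Dumps the tracked region states, roughly one line per symbol of the form
//   <symbol> : <ref state> (<enabled checker name>)
// (illustrative format based on the code below).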
void MallocChecker::printState(raw_ostream &Out, ProgramStateRef State,
                               const char *NL, const char *Sep) const {

  RegionStateTy RS = State->get<RegionState>();

  if (!RS.isEmpty()) {
    Out << Sep << "MallocChecker :" << NL;
    for (auto [Sym, Data] : RS) {
      const RefState *RefS = State->get<RegionState>(Sym);
      AllocationFamily Family = RefS->getAllocationFamily();
      std::optional<MallocChecker::CheckKind> CheckKind =
          getCheckIfTracked(Family);
      if (!CheckKind)
        CheckKind = getCheckIfTracked(Family, true);

      Sym->dumpToStream(Out);
      Out << " : ";
      Data.dump(Out);
      if (CheckKind)
        Out << " (" << CheckNames[*CheckKind].getName() << ")";
      Out << NL;
    }
  }
}

namespace clang {
namespace ento {
namespace allocation_state {

ProgramStateRef
markReleased(ProgramStateRef State, SymbolRef Sym, const Expr *Origin) {
  AllocationFamily Family = AF_InnerBuffer;
  return State->set<RegionState>(Sym, RefState::getReleased(Family, Origin));
}

} // end namespace allocation_state
} // end namespace ento
} // end namespace clang

// Intended to be used in InnerPointerChecker to register the part of
// MallocChecker connected to it.
void ento::registerInnerPointerCheckerAux(CheckerManager &mgr) {
  MallocChecker *checker = mgr.getChecker<MallocChecker>();
  checker->ChecksEnabled[MallocChecker::CK_InnerPointerChecker] = true;
  checker->CheckNames[MallocChecker::CK_InnerPointerChecker] =
      mgr.getCurrentCheckerName();
}

void ento::registerDynamicMemoryModeling(CheckerManager &mgr) {
  auto *checker = mgr.registerChecker<MallocChecker>();
  checker->ShouldIncludeOwnershipAnnotatedFunctions =
      mgr.getAnalyzerOptions().getCheckerBooleanOption(checker, "Optimistic");
  checker->ShouldRegisterNoOwnershipChangeVisitor =
      mgr.getAnalyzerOptions().getCheckerBooleanOption(
          checker, "AddNoOwnershipChangeNotes");
}

bool ento::shouldRegisterDynamicMemoryModeling(const CheckerManager &mgr) {
  return true;
}

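// The REGISTER_CHECKER macro below generates the registration entry points for
// each user-facing check; for example, REGISTER_CHECKER(MallocChecker) expands
// to ento::registerMallocChecker() and ento::shouldRegisterMallocChecker().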
#define REGISTER_CHECKER(name)                                                 \
  void ento::register##name(CheckerManager &mgr) {                            \
    MallocChecker *checker = mgr.getChecker<MallocChecker>();                  \
    checker->ChecksEnabled[MallocChecker::CK_##name] = true;                   \
    checker->CheckNames[MallocChecker::CK_##name] =                            \
        mgr.getCurrentCheckerName();                                           \
  }                                                                            \
                                                                               \
  bool ento::shouldRegister##name(const CheckerManager &mgr) { return true; }

REGISTER_CHECKER(MallocChecker)
REGISTER_CHECKER(NewDeleteChecker)
REGISTER_CHECKER(NewDeleteLeaksChecker)
REGISTER_CHECKER(MismatchedDeallocatorChecker)
REGISTER_CHECKER(TaintedAllocChecker)
