1 | //===-- SanitizerCoverage.cpp - coverage instrumentation for sanitizers ---===// |
2 | // |
3 | // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. |
4 | // See https://llvm.org/LICENSE.txt for license information. |
5 | // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception |
6 | // |
7 | //===----------------------------------------------------------------------===// |
8 | // |
9 | // Coverage instrumentation done on LLVM IR level, works with Sanitizers. |
10 | // |
11 | //===----------------------------------------------------------------------===// |
12 | |
13 | #include "llvm/Transforms/Instrumentation/SanitizerCoverage.h" |
14 | #include "llvm/ADT/ArrayRef.h" |
15 | #include "llvm/ADT/SmallVector.h" |
16 | #include "llvm/Analysis/GlobalsModRef.h" |
17 | #include "llvm/Analysis/PostDominators.h" |
18 | #include "llvm/IR/Constant.h" |
19 | #include "llvm/IR/Constants.h" |
20 | #include "llvm/IR/DataLayout.h" |
21 | #include "llvm/IR/Dominators.h" |
22 | #include "llvm/IR/EHPersonalities.h" |
23 | #include "llvm/IR/Function.h" |
24 | #include "llvm/IR/GlobalVariable.h" |
25 | #include "llvm/IR/IRBuilder.h" |
26 | #include "llvm/IR/IntrinsicInst.h" |
27 | #include "llvm/IR/Intrinsics.h" |
28 | #include "llvm/IR/LLVMContext.h" |
29 | #include "llvm/IR/MDBuilder.h" |
30 | #include "llvm/IR/Module.h" |
31 | #include "llvm/IR/Type.h" |
32 | #include "llvm/IR/ValueSymbolTable.h" |
33 | #include "llvm/Support/CommandLine.h" |
34 | #include "llvm/Support/SpecialCaseList.h" |
35 | #include "llvm/Support/VirtualFileSystem.h" |
36 | #include "llvm/TargetParser/Triple.h" |
37 | #include "llvm/Transforms/Utils/BasicBlockUtils.h" |
38 | #include "llvm/Transforms/Utils/ModuleUtils.h" |
39 | |
40 | using namespace llvm; |
41 | |
42 | #define DEBUG_TYPE "sancov" |
43 | |
44 | const char SanCovTracePCIndirName[] = "__sanitizer_cov_trace_pc_indir" ; |
45 | const char SanCovTracePCName[] = "__sanitizer_cov_trace_pc" ; |
46 | const char SanCovTraceCmp1[] = "__sanitizer_cov_trace_cmp1" ; |
47 | const char SanCovTraceCmp2[] = "__sanitizer_cov_trace_cmp2" ; |
48 | const char SanCovTraceCmp4[] = "__sanitizer_cov_trace_cmp4" ; |
49 | const char SanCovTraceCmp8[] = "__sanitizer_cov_trace_cmp8" ; |
50 | const char SanCovTraceConstCmp1[] = "__sanitizer_cov_trace_const_cmp1" ; |
51 | const char SanCovTraceConstCmp2[] = "__sanitizer_cov_trace_const_cmp2" ; |
52 | const char SanCovTraceConstCmp4[] = "__sanitizer_cov_trace_const_cmp4" ; |
53 | const char SanCovTraceConstCmp8[] = "__sanitizer_cov_trace_const_cmp8" ; |
54 | const char SanCovLoad1[] = "__sanitizer_cov_load1" ; |
55 | const char SanCovLoad2[] = "__sanitizer_cov_load2" ; |
56 | const char SanCovLoad4[] = "__sanitizer_cov_load4" ; |
57 | const char SanCovLoad8[] = "__sanitizer_cov_load8" ; |
58 | const char SanCovLoad16[] = "__sanitizer_cov_load16" ; |
59 | const char SanCovStore1[] = "__sanitizer_cov_store1" ; |
60 | const char SanCovStore2[] = "__sanitizer_cov_store2" ; |
61 | const char SanCovStore4[] = "__sanitizer_cov_store4" ; |
62 | const char SanCovStore8[] = "__sanitizer_cov_store8" ; |
63 | const char SanCovStore16[] = "__sanitizer_cov_store16" ; |
64 | const char SanCovTraceDiv4[] = "__sanitizer_cov_trace_div4" ; |
65 | const char SanCovTraceDiv8[] = "__sanitizer_cov_trace_div8" ; |
66 | const char SanCovTraceGep[] = "__sanitizer_cov_trace_gep" ; |
67 | const char SanCovTraceSwitchName[] = "__sanitizer_cov_trace_switch" ; |
68 | const char SanCovModuleCtorTracePcGuardName[] = |
69 | "sancov.module_ctor_trace_pc_guard" ; |
70 | const char SanCovModuleCtor8bitCountersName[] = |
71 | "sancov.module_ctor_8bit_counters" ; |
72 | const char SanCovModuleCtorBoolFlagName[] = "sancov.module_ctor_bool_flag" ; |
73 | static const uint64_t SanCtorAndDtorPriority = 2; |
74 | |
75 | const char SanCovTracePCGuardName[] = "__sanitizer_cov_trace_pc_guard" ; |
76 | const char SanCovTracePCGuardInitName[] = "__sanitizer_cov_trace_pc_guard_init" ; |
77 | const char SanCov8bitCountersInitName[] = "__sanitizer_cov_8bit_counters_init" ; |
78 | const char SanCovBoolFlagInitName[] = "__sanitizer_cov_bool_flag_init" ; |
79 | const char SanCovPCsInitName[] = "__sanitizer_cov_pcs_init" ; |
80 | const char SanCovCFsInitName[] = "__sanitizer_cov_cfs_init" ; |
81 | |
82 | const char SanCovGuardsSectionName[] = "sancov_guards" ; |
83 | const char SanCovCountersSectionName[] = "sancov_cntrs" ; |
84 | const char SanCovBoolFlagSectionName[] = "sancov_bools" ; |
85 | const char SanCovPCsSectionName[] = "sancov_pcs" ; |
86 | const char SanCovCFsSectionName[] = "sancov_cfs" ; |
87 | const char SanCovCallbackGateSectionName[] = "sancov_gate" ; |
88 | |
89 | const char SanCovStackDepthCallbackName[] = "__sanitizer_cov_stack_depth" ; |
90 | const char SanCovLowestStackName[] = "__sancov_lowest_stack" ; |
91 | const char SanCovCallbackGateName[] = "__sancov_should_track" ; |
92 | |
93 | static cl::opt<int> ClCoverageLevel( |
94 | "sanitizer-coverage-level" , |
95 | cl::desc("Sanitizer Coverage. 0: none, 1: entry block, 2: all blocks, " |
96 | "3: all blocks and critical edges" ), |
97 | cl::Hidden); |
98 | |
99 | static cl::opt<bool> ClTracePC("sanitizer-coverage-trace-pc" , |
100 | cl::desc("Experimental pc tracing" ), cl::Hidden); |
101 | |
102 | static cl::opt<bool> ClTracePCGuard("sanitizer-coverage-trace-pc-guard" , |
103 | cl::desc("pc tracing with a guard" ), |
104 | cl::Hidden); |
105 | |
106 | // If true, we create a global variable that contains PCs of all instrumented |
107 | // BBs, put this global into a named section, and pass this section's bounds |
108 | // to __sanitizer_cov_pcs_init. |
109 | // This way the coverage instrumentation does not need to acquire the PCs |
110 | // at run-time. Works with trace-pc-guard, inline-8bit-counters, and |
111 | // inline-bool-flag. |
112 | static cl::opt<bool> ClCreatePCTable("sanitizer-coverage-pc-table" , |
113 | cl::desc("create a static PC table" ), |
114 | cl::Hidden); |
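// Illustrative sketch (not part of this pass): the PC table emitted under
// this option is a flat array of pointer-sized {PC, Flags} pairs (see
// CreatePCArray below; Flags bit 0 marks a function-entry block). A runtime
// consuming it would define the documented hook roughly as follows; the body
// is an assumption for illustration, not the compiler-rt implementation.
//
//   extern "C" void __sanitizer_cov_pcs_init(const uintptr_t *pcs_beg,
//                                            const uintptr_t *pcs_end) {
//     for (const uintptr_t *p = pcs_beg; p + 1 < pcs_end; p += 2) {
//       uintptr_t PC = p[0], Flags = p[1];
//       bool IsFunctionEntry = (Flags & 1) != 0;
//       (void)PC; (void)IsFunctionEntry; // e.g. record the PC in a table.
//     }
//   }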
115 | |
116 | static cl::opt<bool> |
117 | ClInline8bitCounters("sanitizer-coverage-inline-8bit-counters" , |
118 | cl::desc("increments 8-bit counter for every edge" ), |
119 | cl::Hidden); |
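// Illustrative sketch (not part of this pass): the per-edge counters land in
// the sancov_cntrs section, and a runtime learns their bounds through the
// documented hook below (the body comment is an assumption about how a fuzzer
// might use the range, not the compiler-rt implementation).
//
//   extern "C" void __sanitizer_cov_8bit_counters_init(char *start, char *end) {
//     // [start, end) is this module's counter array; e.g. register the range
//     // so the fuzzer can scan it for newly hit edges and reset it afterwards.
//   }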
120 | |
121 | static cl::opt<bool> |
122 | ClSancovDropCtors("sanitizer-coverage-drop-ctors" , |
123 | cl::desc("do not emit module ctors for global counters" ), |
124 | cl::Hidden); |
125 | |
126 | static cl::opt<bool> |
127 | ClInlineBoolFlag("sanitizer-coverage-inline-bool-flag" , |
128 | cl::desc("sets a boolean flag for every edge" ), |
129 | cl::Hidden); |
130 | |
131 | static cl::opt<bool> |
132 | ClCMPTracing("sanitizer-coverage-trace-compares" , |
133 | cl::desc("Tracing of CMP and similar instructions" ), |
134 | cl::Hidden); |
135 | |
136 | static cl::opt<bool> ClDIVTracing("sanitizer-coverage-trace-divs" , |
137 | cl::desc("Tracing of DIV instructions" ), |
138 | cl::Hidden); |
139 | |
140 | static cl::opt<bool> ClLoadTracing("sanitizer-coverage-trace-loads" , |
141 | cl::desc("Tracing of load instructions" ), |
142 | cl::Hidden); |
143 | |
144 | static cl::opt<bool> ClStoreTracing("sanitizer-coverage-trace-stores" , |
145 | cl::desc("Tracing of store instructions" ), |
146 | cl::Hidden); |
147 | |
148 | static cl::opt<bool> ClGEPTracing("sanitizer-coverage-trace-geps" , |
149 | cl::desc("Tracing of GEP instructions" ), |
150 | cl::Hidden); |
151 | |
152 | static cl::opt<bool> |
153 | ClPruneBlocks("sanitizer-coverage-prune-blocks" , |
154 | cl::desc("Reduce the number of instrumented blocks" ), |
155 | cl::Hidden, cl::init(Val: true)); |
156 | |
157 | static cl::opt<bool> ClStackDepth("sanitizer-coverage-stack-depth" , |
158 | cl::desc("max stack depth tracing" ), |
159 | cl::Hidden); |
160 | |
161 | static cl::opt<int> ClStackDepthCallbackMin( |
162 | "sanitizer-coverage-stack-depth-callback-min" , |
163 | cl::desc("max stack depth tracing should use callback and only when " |
164 | "stack depth more than specified" ), |
165 | cl::Hidden); |
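// Illustrative sketch (not part of this pass): plain stack-depth tracking
// updates a runtime-provided thread-local watermark inline, while the
// callback-min mode emits a call to a hook instead. The names below match the
// constants defined earlier in this file; the exact qualifiers on the runtime
// side are an assumption for illustration.
//
//   extern "C" thread_local uintptr_t __sancov_lowest_stack;
//   extern "C" void __sanitizer_cov_stack_depth(void);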
166 | |
167 | static cl::opt<bool> |
168 | ClCollectCF("sanitizer-coverage-control-flow" , |
169 | cl::desc("collect control flow for each function" ), cl::Hidden); |
170 | |
171 | static cl::opt<bool> ClGatedCallbacks( |
172 | "sanitizer-coverage-gated-trace-callbacks" , |
173 | cl::desc("Gate the invocation of the tracing callbacks on a global variable" |
174 | ". Currently only supported for trace-pc-guard and trace-cmp." ), |
175 | cl::Hidden, cl::init(Val: false)); |
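// Illustrative sketch (not part of this pass): with gated callbacks, the
// emitted trace calls only do work while the __sancov_should_track global
// created below is non-zero. A hypothetical harness (names are placeholders)
// could scope tracing like this:
//
//   extern "C" uint64_t __sancov_should_track;
//   void RunRegionOfInterest() {
//     __sancov_should_track = 1; // enable trace-pc-guard / trace-cmp callbacks
//     DoInterestingWork();       // hypothetical workload
//     __sancov_should_track = 0; // callbacks become cheap no-ops again
//   }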
176 | |
177 | namespace { |
178 | |
179 | SanitizerCoverageOptions getOptions(int LegacyCoverageLevel) { |
180 | SanitizerCoverageOptions Res; |
181 | switch (LegacyCoverageLevel) { |
182 | case 0: |
183 | Res.CoverageType = SanitizerCoverageOptions::SCK_None; |
184 | break; |
185 | case 1: |
186 | Res.CoverageType = SanitizerCoverageOptions::SCK_Function; |
187 | break; |
188 | case 2: |
189 | Res.CoverageType = SanitizerCoverageOptions::SCK_BB; |
190 | break; |
191 | case 3: |
192 | Res.CoverageType = SanitizerCoverageOptions::SCK_Edge; |
193 | break; |
194 | case 4: |
195 | Res.CoverageType = SanitizerCoverageOptions::SCK_Edge; |
196 | Res.IndirectCalls = true; |
197 | break; |
198 | } |
199 | return Res; |
200 | } |
201 | |
202 | SanitizerCoverageOptions OverrideFromCL(SanitizerCoverageOptions Options) { |
203 | // Sets CoverageType and IndirectCalls. |
204 | SanitizerCoverageOptions CLOpts = getOptions(LegacyCoverageLevel: ClCoverageLevel); |
205 | Options.CoverageType = std::max(a: Options.CoverageType, b: CLOpts.CoverageType); |
206 | Options.IndirectCalls |= CLOpts.IndirectCalls; |
207 | Options.TraceCmp |= ClCMPTracing; |
208 | Options.TraceDiv |= ClDIVTracing; |
209 | Options.TraceGep |= ClGEPTracing; |
210 | Options.TracePC |= ClTracePC; |
211 | Options.TracePCGuard |= ClTracePCGuard; |
212 | Options.Inline8bitCounters |= ClInline8bitCounters; |
213 | Options.InlineBoolFlag |= ClInlineBoolFlag; |
214 | Options.PCTable |= ClCreatePCTable; |
215 | Options.NoPrune |= !ClPruneBlocks; |
216 | Options.StackDepth |= ClStackDepth; |
217 | Options.StackDepthCallbackMin = std::max(a: Options.StackDepthCallbackMin, |
218 | b: ClStackDepthCallbackMin.getValue()); |
219 | Options.TraceLoads |= ClLoadTracing; |
220 | Options.TraceStores |= ClStoreTracing; |
221 | Options.GatedCallbacks |= ClGatedCallbacks; |
222 | if (!Options.TracePCGuard && !Options.TracePC && |
223 | !Options.Inline8bitCounters && !Options.StackDepth && |
224 | !Options.InlineBoolFlag && !Options.TraceLoads && !Options.TraceStores) |
225 | Options.TracePCGuard = true; // TracePCGuard is default. |
226 | Options.CollectControlFlow |= ClCollectCF; |
227 | return Options; |
228 | } |
229 | |
230 | class ModuleSanitizerCoverage { |
231 | public: |
232 | using DomTreeCallback = function_ref<const DominatorTree &(Function &F)>; |
233 | using PostDomTreeCallback = |
234 | function_ref<const PostDominatorTree &(Function &F)>; |
235 | |
236 | ModuleSanitizerCoverage(Module &M, DomTreeCallback DTCallback, |
237 | PostDomTreeCallback PDTCallback, |
238 | const SanitizerCoverageOptions &Options, |
239 | const SpecialCaseList *Allowlist, |
240 | const SpecialCaseList *Blocklist) |
241 | : M(M), DTCallback(DTCallback), PDTCallback(PDTCallback), |
242 | Options(Options), Allowlist(Allowlist), Blocklist(Blocklist) {} |
243 | |
244 | bool instrumentModule(); |
245 | |
246 | private: |
247 | void createFunctionControlFlow(Function &F); |
248 | void instrumentFunction(Function &F); |
249 | void InjectCoverageForIndirectCalls(Function &F, |
250 | ArrayRef<Instruction *> IndirCalls); |
251 | void InjectTraceForCmp(Function &F, ArrayRef<Instruction *> CmpTraceTargets, |
252 | Value *&FunctionGateCmp); |
253 | void InjectTraceForDiv(Function &F, |
254 | ArrayRef<BinaryOperator *> DivTraceTargets); |
255 | void InjectTraceForGep(Function &F, |
256 | ArrayRef<GetElementPtrInst *> GepTraceTargets); |
257 | void InjectTraceForLoadsAndStores(Function &F, ArrayRef<LoadInst *> Loads, |
258 | ArrayRef<StoreInst *> Stores); |
259 | void InjectTraceForSwitch(Function &F, |
260 | ArrayRef<Instruction *> SwitchTraceTargets, |
261 | Value *&FunctionGateCmp); |
262 | bool InjectCoverage(Function &F, ArrayRef<BasicBlock *> AllBlocks, |
263 | Value *&FunctionGateCmp, bool IsLeafFunc); |
264 | GlobalVariable *CreateFunctionLocalArrayInSection(size_t NumElements, |
265 | Function &F, Type *Ty, |
266 | const char *Section); |
267 | GlobalVariable *CreatePCArray(Function &F, ArrayRef<BasicBlock *> AllBlocks); |
268 | void CreateFunctionLocalArrays(Function &F, ArrayRef<BasicBlock *> AllBlocks); |
269 | Instruction *CreateGateBranch(Function &F, Value *&FunctionGateCmp, |
270 | Instruction *I); |
271 | Value *CreateFunctionLocalGateCmp(IRBuilder<> &IRB); |
272 | void InjectCoverageAtBlock(Function &F, BasicBlock &BB, size_t Idx, |
273 | Value *&FunctionGateCmp, bool IsLeafFunc); |
274 | Function *CreateInitCallsForSections(Module &M, const char *CtorName, |
275 | const char *InitFunctionName, Type *Ty, |
276 | const char *Section); |
277 | std::pair<Value *, Value *> CreateSecStartEnd(Module &M, const char *Section, |
278 | Type *Ty); |
279 | |
280 | std::string getSectionName(const std::string &Section) const; |
281 | std::string getSectionStart(const std::string &Section) const; |
282 | std::string getSectionEnd(const std::string &Section) const; |
283 | |
284 | Module &M; |
285 | DomTreeCallback DTCallback; |
286 | PostDomTreeCallback PDTCallback; |
287 | |
288 | FunctionCallee SanCovStackDepthCallback; |
289 | FunctionCallee SanCovTracePCIndir; |
290 | FunctionCallee SanCovTracePC, SanCovTracePCGuard; |
291 | std::array<FunctionCallee, 4> SanCovTraceCmpFunction; |
292 | std::array<FunctionCallee, 4> SanCovTraceConstCmpFunction; |
293 | std::array<FunctionCallee, 5> SanCovLoadFunction; |
294 | std::array<FunctionCallee, 5> SanCovStoreFunction; |
295 | std::array<FunctionCallee, 2> SanCovTraceDivFunction; |
296 | FunctionCallee SanCovTraceGepFunction; |
297 | FunctionCallee SanCovTraceSwitchFunction; |
298 | GlobalVariable *SanCovLowestStack; |
299 | GlobalVariable *SanCovCallbackGate; |
300 | Type *PtrTy, *IntptrTy, *Int64Ty, *Int32Ty, *Int16Ty, *Int8Ty, *Int1Ty; |
301 | Module *CurModule; |
302 | Triple TargetTriple; |
303 | LLVMContext *C; |
304 | const DataLayout *DL; |
305 | |
306 | GlobalVariable *FunctionGuardArray; // for trace-pc-guard. |
307 | GlobalVariable *Function8bitCounterArray; // for inline-8bit-counters. |
308 | GlobalVariable *FunctionBoolArray; // for inline-bool-flag. |
309 | GlobalVariable *FunctionPCsArray; // for pc-table. |
310 | GlobalVariable *FunctionCFsArray; // for control flow table |
311 | SmallVector<GlobalValue *, 20> GlobalsToAppendToUsed; |
312 | SmallVector<GlobalValue *, 20> GlobalsToAppendToCompilerUsed; |
313 | |
314 | SanitizerCoverageOptions Options; |
315 | |
316 | const SpecialCaseList *Allowlist; |
317 | const SpecialCaseList *Blocklist; |
318 | }; |
319 | } // namespace |
320 | |
321 | PreservedAnalyses SanitizerCoveragePass::run(Module &M, |
322 | ModuleAnalysisManager &MAM) { |
323 | auto &FAM = MAM.getResult<FunctionAnalysisManagerModuleProxy>(IR&: M).getManager(); |
324 | auto DTCallback = [&FAM](Function &F) -> const DominatorTree & { |
325 | return FAM.getResult<DominatorTreeAnalysis>(IR&: F); |
326 | }; |
327 | auto PDTCallback = [&FAM](Function &F) -> const PostDominatorTree & { |
328 | return FAM.getResult<PostDominatorTreeAnalysis>(IR&: F); |
329 | }; |
330 | ModuleSanitizerCoverage ModuleSancov(M, DTCallback, PDTCallback, |
331 | OverrideFromCL(Options), Allowlist.get(), |
332 | Blocklist.get()); |
333 | if (!ModuleSancov.instrumentModule()) |
334 | return PreservedAnalyses::all(); |
335 | |
336 | PreservedAnalyses PA = PreservedAnalyses::none(); |
337 | // GlobalsAA is considered stateless and does not get invalidated unless |
338 | // explicitly invalidated; PreservedAnalyses::none() is not enough. Sanitizers |
339 | // make changes that require GlobalsAA to be invalidated. |
340 | PA.abandon<GlobalsAA>(); |
341 | return PA; |
342 | } |
343 | |
344 | std::pair<Value *, Value *> |
345 | ModuleSanitizerCoverage::CreateSecStartEnd(Module &M, const char *Section, |
346 | Type *Ty) { |
347 | // Use ExternalWeak so that if all sections are discarded due to section |
348 | // garbage collection, the linker will not report undefined symbol errors. |
349 | // Windows defines the start/stop symbols in compiler-rt so no need for |
350 | // ExternalWeak. |
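// For ELF, the section emitted by this pass ("__sancov_guards" etc., see
// getSectionName) is a valid C identifier, so the linker synthesizes the
// __start___*/__stop___* encapsulation symbols requested by
// getSectionStart/getSectionEnd below. A minimal sketch of how a runtime
// could reference the guard bounds directly (for illustration only; this
// pass itself creates the weak globals just below):
//
//   extern "C" uint32_t __start___sancov_guards[], __stop___sancov_guards[];
//   size_t NumGuards() {
//     return __stop___sancov_guards - __start___sancov_guards;
//   }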
351 | GlobalValue::LinkageTypes Linkage = TargetTriple.isOSBinFormatCOFF() |
352 | ? GlobalVariable::ExternalLinkage |
353 | : GlobalVariable::ExternalWeakLinkage; |
354 | GlobalVariable *SecStart = new GlobalVariable(M, Ty, false, Linkage, nullptr, |
355 | getSectionStart(Section)); |
356 | SecStart->setVisibility(GlobalValue::HiddenVisibility); |
357 | GlobalVariable *SecEnd = new GlobalVariable(M, Ty, false, Linkage, nullptr, |
358 | getSectionEnd(Section)); |
359 | SecEnd->setVisibility(GlobalValue::HiddenVisibility); |
360 | IRBuilder<> IRB(M.getContext()); |
361 | if (!TargetTriple.isOSBinFormatCOFF()) |
362 | return std::make_pair(x&: SecStart, y&: SecEnd); |
363 | |
364 | // Account for the fact that on windows-msvc __start_* symbols actually |
365 | // point to a uint64_t before the start of the array. |
366 | auto GEP = |
367 | IRB.CreatePtrAdd(Ptr: SecStart, Offset: ConstantInt::get(Ty: IntptrTy, V: sizeof(uint64_t))); |
368 | return std::make_pair(x&: GEP, y&: SecEnd); |
369 | } |
370 | |
371 | Function *ModuleSanitizerCoverage::CreateInitCallsForSections( |
372 | Module &M, const char *CtorName, const char *InitFunctionName, Type *Ty, |
373 | const char *Section) { |
374 | if (ClSancovDropCtors) |
375 | return nullptr; |
376 | auto SecStartEnd = CreateSecStartEnd(M, Section, Ty); |
377 | auto SecStart = SecStartEnd.first; |
378 | auto SecEnd = SecStartEnd.second; |
379 | Function *CtorFunc; |
380 | std::tie(args&: CtorFunc, args: std::ignore) = createSanitizerCtorAndInitFunctions( |
381 | M, CtorName, InitName: InitFunctionName, InitArgTypes: {PtrTy, PtrTy}, InitArgs: {SecStart, SecEnd}); |
382 | assert(CtorFunc->getName() == CtorName); |
383 | |
384 | if (TargetTriple.supportsCOMDAT()) { |
385 | // Use comdat to dedup CtorFunc. |
386 | CtorFunc->setComdat(M.getOrInsertComdat(Name: CtorName)); |
387 | appendToGlobalCtors(M, F: CtorFunc, Priority: SanCtorAndDtorPriority, Data: CtorFunc); |
388 | } else { |
389 | appendToGlobalCtors(M, F: CtorFunc, Priority: SanCtorAndDtorPriority); |
390 | } |
391 | |
392 | if (TargetTriple.isOSBinFormatCOFF()) { |
393 | // In COFF files, if the constructors are set as COMDAT (they are because |
394 | // COFF supports COMDAT) and the linker flag /OPT:REF (strip unreferenced |
395 | // functions and data) is used, the constructors get stripped. To prevent |
396 | // this, give the constructors weak ODR linkage and ensure the linker knows |
397 | // to include the sancov constructor. This way the linker can deduplicate |
398 | // the constructors but always leave one copy. |
399 | CtorFunc->setLinkage(GlobalValue::WeakODRLinkage); |
400 | } |
401 | return CtorFunc; |
402 | } |
403 | |
404 | bool ModuleSanitizerCoverage::instrumentModule() { |
405 | if (Options.CoverageType == SanitizerCoverageOptions::SCK_None) |
406 | return false; |
407 | if (Allowlist && |
408 | !Allowlist->inSection(Section: "coverage" , Prefix: "src" , Query: M.getSourceFileName())) |
409 | return false; |
410 | if (Blocklist && |
411 | Blocklist->inSection(Section: "coverage" , Prefix: "src" , Query: M.getSourceFileName())) |
412 | return false; |
413 | C = &(M.getContext()); |
414 | DL = &M.getDataLayout(); |
415 | CurModule = &M; |
416 | TargetTriple = M.getTargetTriple(); |
417 | FunctionGuardArray = nullptr; |
418 | Function8bitCounterArray = nullptr; |
419 | FunctionBoolArray = nullptr; |
420 | FunctionPCsArray = nullptr; |
421 | FunctionCFsArray = nullptr; |
422 | IntptrTy = Type::getIntNTy(C&: *C, N: DL->getPointerSizeInBits()); |
423 | PtrTy = PointerType::getUnqual(C&: *C); |
424 | Type *VoidTy = Type::getVoidTy(C&: *C); |
425 | IRBuilder<> IRB(*C); |
426 | Int64Ty = IRB.getInt64Ty(); |
427 | Int32Ty = IRB.getInt32Ty(); |
428 | Int16Ty = IRB.getInt16Ty(); |
429 | Int8Ty = IRB.getInt8Ty(); |
430 | Int1Ty = IRB.getInt1Ty(); |
431 | |
432 | SanCovTracePCIndir = |
433 | M.getOrInsertFunction(Name: SanCovTracePCIndirName, RetTy: VoidTy, Args: IntptrTy); |
434 | // Make sure smaller parameters are zero-extended to i64 if required by the |
435 | // target ABI. |
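// For reference, the trace-cmp hooks targeted here have, per the public
// SanitizerCoverage interface, C prototypes like the ones below; the ZExt
// parameter attribute keeps the sub-word arguments well-defined on ABIs that
// require the caller to extend them (sketch only, not used by this pass):
//
//   extern "C" void __sanitizer_cov_trace_cmp1(uint8_t Arg1, uint8_t Arg2);
//   extern "C" void __sanitizer_cov_trace_cmp2(uint16_t Arg1, uint16_t Arg2);
//   extern "C" void __sanitizer_cov_trace_cmp4(uint32_t Arg1, uint32_t Arg2);
//   extern "C" void __sanitizer_cov_trace_cmp8(uint64_t Arg1, uint64_t Arg2);
//   // The _const_ variants take the compile-time-constant operand as Arg1.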
436 | AttributeList SanCovTraceCmpZeroExtAL; |
437 | SanCovTraceCmpZeroExtAL = |
438 | SanCovTraceCmpZeroExtAL.addParamAttribute(C&: *C, ArgNo: 0, Kind: Attribute::ZExt); |
439 | SanCovTraceCmpZeroExtAL = |
440 | SanCovTraceCmpZeroExtAL.addParamAttribute(C&: *C, ArgNo: 1, Kind: Attribute::ZExt); |
441 | |
442 | SanCovTraceCmpFunction[0] = |
443 | M.getOrInsertFunction(Name: SanCovTraceCmp1, AttributeList: SanCovTraceCmpZeroExtAL, RetTy: VoidTy, |
444 | Args: IRB.getInt8Ty(), Args: IRB.getInt8Ty()); |
445 | SanCovTraceCmpFunction[1] = |
446 | M.getOrInsertFunction(Name: SanCovTraceCmp2, AttributeList: SanCovTraceCmpZeroExtAL, RetTy: VoidTy, |
447 | Args: IRB.getInt16Ty(), Args: IRB.getInt16Ty()); |
448 | SanCovTraceCmpFunction[2] = |
449 | M.getOrInsertFunction(Name: SanCovTraceCmp4, AttributeList: SanCovTraceCmpZeroExtAL, RetTy: VoidTy, |
450 | Args: IRB.getInt32Ty(), Args: IRB.getInt32Ty()); |
451 | SanCovTraceCmpFunction[3] = |
452 | M.getOrInsertFunction(Name: SanCovTraceCmp8, RetTy: VoidTy, Args: Int64Ty, Args: Int64Ty); |
453 | |
454 | SanCovTraceConstCmpFunction[0] = M.getOrInsertFunction( |
455 | Name: SanCovTraceConstCmp1, AttributeList: SanCovTraceCmpZeroExtAL, RetTy: VoidTy, Args: Int8Ty, Args: Int8Ty); |
456 | SanCovTraceConstCmpFunction[1] = M.getOrInsertFunction( |
457 | Name: SanCovTraceConstCmp2, AttributeList: SanCovTraceCmpZeroExtAL, RetTy: VoidTy, Args: Int16Ty, Args: Int16Ty); |
458 | SanCovTraceConstCmpFunction[2] = M.getOrInsertFunction( |
459 | Name: SanCovTraceConstCmp4, AttributeList: SanCovTraceCmpZeroExtAL, RetTy: VoidTy, Args: Int32Ty, Args: Int32Ty); |
460 | SanCovTraceConstCmpFunction[3] = |
461 | M.getOrInsertFunction(Name: SanCovTraceConstCmp8, RetTy: VoidTy, Args: Int64Ty, Args: Int64Ty); |
462 | |
463 | // Loads. |
464 | SanCovLoadFunction[0] = M.getOrInsertFunction(Name: SanCovLoad1, RetTy: VoidTy, Args: PtrTy); |
465 | SanCovLoadFunction[1] = M.getOrInsertFunction(Name: SanCovLoad2, RetTy: VoidTy, Args: PtrTy); |
466 | SanCovLoadFunction[2] = M.getOrInsertFunction(Name: SanCovLoad4, RetTy: VoidTy, Args: PtrTy); |
467 | SanCovLoadFunction[3] = M.getOrInsertFunction(Name: SanCovLoad8, RetTy: VoidTy, Args: PtrTy); |
468 | SanCovLoadFunction[4] = M.getOrInsertFunction(Name: SanCovLoad16, RetTy: VoidTy, Args: PtrTy); |
469 | // Stores. |
470 | SanCovStoreFunction[0] = M.getOrInsertFunction(Name: SanCovStore1, RetTy: VoidTy, Args: PtrTy); |
471 | SanCovStoreFunction[1] = M.getOrInsertFunction(Name: SanCovStore2, RetTy: VoidTy, Args: PtrTy); |
472 | SanCovStoreFunction[2] = M.getOrInsertFunction(Name: SanCovStore4, RetTy: VoidTy, Args: PtrTy); |
473 | SanCovStoreFunction[3] = M.getOrInsertFunction(Name: SanCovStore8, RetTy: VoidTy, Args: PtrTy); |
474 | SanCovStoreFunction[4] = M.getOrInsertFunction(Name: SanCovStore16, RetTy: VoidTy, Args: PtrTy); |
475 | |
476 | { |
477 | AttributeList AL; |
478 | AL = AL.addParamAttribute(C&: *C, ArgNo: 0, Kind: Attribute::ZExt); |
479 | SanCovTraceDivFunction[0] = |
480 | M.getOrInsertFunction(Name: SanCovTraceDiv4, AttributeList: AL, RetTy: VoidTy, Args: IRB.getInt32Ty()); |
481 | } |
482 | SanCovTraceDivFunction[1] = |
483 | M.getOrInsertFunction(Name: SanCovTraceDiv8, RetTy: VoidTy, Args: Int64Ty); |
484 | SanCovTraceGepFunction = |
485 | M.getOrInsertFunction(Name: SanCovTraceGep, RetTy: VoidTy, Args: IntptrTy); |
486 | SanCovTraceSwitchFunction = |
487 | M.getOrInsertFunction(Name: SanCovTraceSwitchName, RetTy: VoidTy, Args: Int64Ty, Args: PtrTy); |
488 | |
489 | SanCovLowestStack = M.getOrInsertGlobal(Name: SanCovLowestStackName, Ty: IntptrTy); |
490 | if (SanCovLowestStack->getValueType() != IntptrTy) { |
491 | C->emitError(ErrorStr: StringRef("'" ) + SanCovLowestStackName + |
492 | "' should not be declared by the user" ); |
493 | return true; |
494 | } |
495 | SanCovLowestStack->setThreadLocalMode( |
496 | GlobalValue::ThreadLocalMode::InitialExecTLSModel); |
497 | if (Options.StackDepth && !SanCovLowestStack->isDeclaration()) |
498 | SanCovLowestStack->setInitializer(Constant::getAllOnesValue(Ty: IntptrTy)); |
499 | |
500 | if (Options.GatedCallbacks) { |
501 | if (!Options.TracePCGuard && !Options.TraceCmp) { |
502 | C->emitError(ErrorStr: StringRef("'" ) + ClGatedCallbacks.ArgStr + |
503 | "' is only supported with trace-pc-guard or trace-cmp" ); |
504 | return true; |
505 | } |
506 | |
507 | SanCovCallbackGate = cast<GlobalVariable>( |
508 | Val: M.getOrInsertGlobal(Name: SanCovCallbackGateName, Ty: Int64Ty)); |
509 | SanCovCallbackGate->setSection( |
510 | getSectionName(Section: SanCovCallbackGateSectionName)); |
511 | SanCovCallbackGate->setInitializer(Constant::getNullValue(Ty: Int64Ty)); |
512 | SanCovCallbackGate->setLinkage(GlobalVariable::LinkOnceAnyLinkage); |
513 | SanCovCallbackGate->setVisibility(GlobalVariable::HiddenVisibility); |
514 | appendToCompilerUsed(M, Values: SanCovCallbackGate); |
515 | } |
516 | |
517 | SanCovTracePC = M.getOrInsertFunction(Name: SanCovTracePCName, RetTy: VoidTy); |
518 | SanCovTracePCGuard = |
519 | M.getOrInsertFunction(Name: SanCovTracePCGuardName, RetTy: VoidTy, Args: PtrTy); |
520 | |
521 | SanCovStackDepthCallback = |
522 | M.getOrInsertFunction(Name: SanCovStackDepthCallbackName, RetTy: VoidTy); |
523 | |
524 | for (auto &F : M) |
525 | instrumentFunction(F); |
526 | |
527 | Function *Ctor = nullptr; |
528 | |
529 | if (FunctionGuardArray) |
530 | Ctor = CreateInitCallsForSections(M, CtorName: SanCovModuleCtorTracePcGuardName, |
531 | InitFunctionName: SanCovTracePCGuardInitName, Ty: Int32Ty, |
532 | Section: SanCovGuardsSectionName); |
533 | if (Function8bitCounterArray) |
534 | Ctor = CreateInitCallsForSections(M, CtorName: SanCovModuleCtor8bitCountersName, |
535 | InitFunctionName: SanCov8bitCountersInitName, Ty: Int8Ty, |
536 | Section: SanCovCountersSectionName); |
537 | if (FunctionBoolArray) { |
538 | Ctor = CreateInitCallsForSections(M, CtorName: SanCovModuleCtorBoolFlagName, |
539 | InitFunctionName: SanCovBoolFlagInitName, Ty: Int1Ty, |
540 | Section: SanCovBoolFlagSectionName); |
541 | } |
542 | if (Ctor && Options.PCTable) { |
543 | auto SecStartEnd = CreateSecStartEnd(M, Section: SanCovPCsSectionName, Ty: IntptrTy); |
544 | FunctionCallee InitFunction = |
545 | declareSanitizerInitFunction(M, InitName: SanCovPCsInitName, InitArgTypes: {PtrTy, PtrTy}); |
546 | IRBuilder<> IRBCtor(Ctor->getEntryBlock().getTerminator()); |
547 | IRBCtor.CreateCall(Callee: InitFunction, Args: {SecStartEnd.first, SecStartEnd.second}); |
548 | } |
549 | |
550 | if (Ctor && Options.CollectControlFlow) { |
551 | auto SecStartEnd = CreateSecStartEnd(M, Section: SanCovCFsSectionName, Ty: IntptrTy); |
552 | FunctionCallee InitFunction = |
553 | declareSanitizerInitFunction(M, InitName: SanCovCFsInitName, InitArgTypes: {PtrTy, PtrTy}); |
554 | IRBuilder<> IRBCtor(Ctor->getEntryBlock().getTerminator()); |
555 | IRBCtor.CreateCall(Callee: InitFunction, Args: {SecStartEnd.first, SecStartEnd.second}); |
556 | } |
557 | |
558 | appendToUsed(M, Values: GlobalsToAppendToUsed); |
559 | appendToCompilerUsed(M, Values: GlobalsToAppendToCompilerUsed); |
560 | return true; |
561 | } |
562 | |
563 | // True if block has successors and it dominates all of them. |
564 | static bool isFullDominator(const BasicBlock *BB, const DominatorTree &DT) { |
565 | if (succ_empty(BB)) |
566 | return false; |
567 | |
568 | return llvm::all_of(Range: successors(BB), P: [&](const BasicBlock *SUCC) { |
569 | return DT.dominates(A: BB, B: SUCC); |
570 | }); |
571 | } |
572 | |
573 | // True if block has predecessors and it postdominates all of them. |
574 | static bool isFullPostDominator(const BasicBlock *BB, |
575 | const PostDominatorTree &PDT) { |
576 | if (pred_empty(BB)) |
577 | return false; |
578 | |
579 | return llvm::all_of(Range: predecessors(BB), P: [&](const BasicBlock *PRED) { |
580 | return PDT.dominates(A: BB, B: PRED); |
581 | }); |
582 | } |
583 | |
584 | static bool shouldInstrumentBlock(const Function &F, const BasicBlock *BB, |
585 | const DominatorTree &DT, |
586 | const PostDominatorTree &PDT, |
587 | const SanitizerCoverageOptions &Options) { |
588 | // Don't insert coverage for blocks containing nothing but unreachable: we |
589 | // will never call __sanitizer_cov() for them, so counting them in |
590 | // NumberOfInstrumentedBlocks() might complicate calculation of code coverage |
591 | // percentage. Also, unreachable instructions frequently have no debug |
592 | // locations. |
593 | if (isa<UnreachableInst>(Val: BB->getFirstNonPHIOrDbgOrLifetime())) |
594 | return false; |
595 | |
596 | // Don't insert coverage into blocks without a valid insertion point |
597 | // (catchswitch blocks). |
598 | if (BB->getFirstInsertionPt() == BB->end()) |
599 | return false; |
600 | |
601 | if (Options.NoPrune || &F.getEntryBlock() == BB) |
602 | return true; |
603 | |
604 | if (Options.CoverageType == SanitizerCoverageOptions::SCK_Function && |
605 | &F.getEntryBlock() != BB) |
606 | return false; |
607 | |
608 | // Do not instrument full dominators, or full post-dominators with multiple |
609 | // predecessors. |
610 | return !isFullDominator(BB, DT) && |
611 | !(isFullPostDominator(BB, PDT) && !BB->getSinglePredecessor()); |
612 | } |
613 | |
614 | // Returns true iff From->To is a backedge. |
615 | // A twist here is that we treat From->To as a backedge if |
616 | // * To dominates From or |
617 | // * To->UniqueSuccessor dominates From |
618 | static bool IsBackEdge(BasicBlock *From, BasicBlock *To, |
619 | const DominatorTree &DT) { |
620 | if (DT.dominates(A: To, B: From)) |
621 | return true; |
622 | if (auto Next = To->getUniqueSuccessor()) |
623 | if (DT.dominates(A: Next, B: From)) |
624 | return true; |
625 | return false; |
626 | } |
627 | |
628 | // Prunes uninteresting Cmp instrumentation: |
629 | // * CMP instructions that feed into loop backedge branch. |
630 | // |
631 | // Note that Cmp pruning is controlled by the same flag as the |
632 | // BB pruning. |
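// For example, the "i < n" test that only feeds a rotated loop's latch branch
// targets a backedge and is pruned, while a compare with other uses or one
// feeding ordinary forward control flow is still instrumented.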
633 | static bool IsInterestingCmp(ICmpInst *CMP, const DominatorTree &DT, |
634 | const SanitizerCoverageOptions &Options) { |
635 | if (!Options.NoPrune) |
636 | if (CMP->hasOneUse()) |
637 | if (auto BR = dyn_cast<BranchInst>(Val: CMP->user_back())) |
638 | for (BasicBlock *B : BR->successors()) |
639 | if (IsBackEdge(From: BR->getParent(), To: B, DT)) |
640 | return false; |
641 | return true; |
642 | } |
643 | |
644 | void ModuleSanitizerCoverage::instrumentFunction(Function &F) { |
645 | if (F.empty()) |
646 | return; |
647 | if (F.getName().contains(Other: ".module_ctor" )) |
648 | return; // Should not instrument sanitizer init functions. |
649 | if (F.getName().starts_with(Prefix: "__sanitizer_" )) |
650 | return; // Don't instrument __sanitizer_* callbacks. |
651 | // Don't touch available_externally functions, their actual body is elsewhere. |
652 | if (F.getLinkage() == GlobalValue::AvailableExternallyLinkage) |
653 | return; |
654 | // Don't instrument MSVC CRT configuration helpers. They may run before normal |
655 | // initialization. |
656 | if (F.getName() == "__local_stdio_printf_options" || |
657 | F.getName() == "__local_stdio_scanf_options" ) |
658 | return; |
659 | if (isa<UnreachableInst>(Val: F.getEntryBlock().getTerminator())) |
660 | return; |
661 | // Don't instrument functions using SEH for now. Splitting basic blocks like |
662 | // we do for coverage breaks WinEHPrepare. |
663 | // FIXME: Remove this when SEH no longer uses landingpad pattern matching. |
664 | if (F.hasPersonalityFn() && |
665 | isAsynchronousEHPersonality(Pers: classifyEHPersonality(Pers: F.getPersonalityFn()))) |
666 | return; |
667 | if (Allowlist && !Allowlist->inSection(Section: "coverage" , Prefix: "fun" , Query: F.getName())) |
668 | return; |
669 | if (Blocklist && Blocklist->inSection(Section: "coverage" , Prefix: "fun" , Query: F.getName())) |
670 | return; |
671 | // Do not apply any instrumentation for naked functions. |
672 | if (F.hasFnAttribute(Kind: Attribute::Naked)) |
673 | return; |
674 | if (F.hasFnAttribute(Kind: Attribute::NoSanitizeCoverage)) |
675 | return; |
676 | if (F.hasFnAttribute(Kind: Attribute::DisableSanitizerInstrumentation)) |
677 | return; |
678 | if (Options.CoverageType >= SanitizerCoverageOptions::SCK_Edge) { |
679 | SplitAllCriticalEdges( |
680 | F, Options: CriticalEdgeSplittingOptions().setIgnoreUnreachableDests()); |
681 | } |
682 | SmallVector<Instruction *, 8> IndirCalls; |
683 | SmallVector<BasicBlock *, 16> BlocksToInstrument; |
684 | SmallVector<Instruction *, 8> CmpTraceTargets; |
685 | SmallVector<Instruction *, 8> SwitchTraceTargets; |
686 | SmallVector<BinaryOperator *, 8> DivTraceTargets; |
687 | SmallVector<GetElementPtrInst *, 8> GepTraceTargets; |
688 | SmallVector<LoadInst *, 8> Loads; |
689 | SmallVector<StoreInst *, 8> Stores; |
690 | |
691 | const DominatorTree &DT = DTCallback(F); |
692 | const PostDominatorTree &PDT = PDTCallback(F); |
693 | bool IsLeafFunc = true; |
694 | |
695 | for (auto &BB : F) { |
696 | if (shouldInstrumentBlock(F, BB: &BB, DT, PDT, Options)) |
697 | BlocksToInstrument.push_back(Elt: &BB); |
698 | for (auto &Inst : BB) { |
699 | if (Options.IndirectCalls) { |
700 | CallBase *CB = dyn_cast<CallBase>(Val: &Inst); |
701 | if (CB && CB->isIndirectCall()) |
702 | IndirCalls.push_back(Elt: &Inst); |
703 | } |
704 | if (Options.TraceCmp) { |
705 | if (ICmpInst *CMP = dyn_cast<ICmpInst>(Val: &Inst)) |
706 | if (IsInterestingCmp(CMP, DT, Options)) |
707 | CmpTraceTargets.push_back(Elt: &Inst); |
708 | if (isa<SwitchInst>(Val: &Inst)) |
709 | SwitchTraceTargets.push_back(Elt: &Inst); |
710 | } |
711 | if (Options.TraceDiv) |
712 | if (BinaryOperator *BO = dyn_cast<BinaryOperator>(Val: &Inst)) |
713 | if (BO->getOpcode() == Instruction::SDiv || |
714 | BO->getOpcode() == Instruction::UDiv) |
715 | DivTraceTargets.push_back(Elt: BO); |
716 | if (Options.TraceGep) |
717 | if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(Val: &Inst)) |
718 | GepTraceTargets.push_back(Elt: GEP); |
719 | if (Options.TraceLoads) |
720 | if (LoadInst *LI = dyn_cast<LoadInst>(Val: &Inst)) |
721 | Loads.push_back(Elt: LI); |
722 | if (Options.TraceStores) |
723 | if (StoreInst *SI = dyn_cast<StoreInst>(Val: &Inst)) |
724 | Stores.push_back(Elt: SI); |
725 | if (Options.StackDepth) |
726 | if (isa<InvokeInst>(Val: Inst) || |
727 | (isa<CallInst>(Val: Inst) && !isa<IntrinsicInst>(Val: Inst))) |
728 | IsLeafFunc = false; |
729 | } |
730 | } |
731 | |
732 | if (Options.CollectControlFlow) |
733 | createFunctionControlFlow(F); |
734 | |
735 | Value *FunctionGateCmp = nullptr; |
736 | InjectCoverage(F, AllBlocks: BlocksToInstrument, FunctionGateCmp, IsLeafFunc); |
737 | InjectCoverageForIndirectCalls(F, IndirCalls); |
738 | InjectTraceForCmp(F, CmpTraceTargets, FunctionGateCmp); |
739 | InjectTraceForSwitch(F, SwitchTraceTargets, FunctionGateCmp); |
740 | InjectTraceForDiv(F, DivTraceTargets); |
741 | InjectTraceForGep(F, GepTraceTargets); |
742 | InjectTraceForLoadsAndStores(F, Loads, Stores); |
743 | } |
744 | |
745 | GlobalVariable *ModuleSanitizerCoverage::CreateFunctionLocalArrayInSection( |
746 | size_t NumElements, Function &F, Type *Ty, const char *Section) { |
747 | ArrayType *ArrayTy = ArrayType::get(ElementType: Ty, NumElements); |
748 | auto Array = new GlobalVariable( |
749 | *CurModule, ArrayTy, false, GlobalVariable::PrivateLinkage, |
750 | Constant::getNullValue(Ty: ArrayTy), "__sancov_gen_" ); |
751 | |
752 | if (TargetTriple.supportsCOMDAT() && |
753 | (F.hasComdat() || TargetTriple.isOSBinFormatELF() || !F.isInterposable())) |
754 | if (auto Comdat = getOrCreateFunctionComdat(F, T&: TargetTriple)) |
755 | Array->setComdat(Comdat); |
756 | Array->setSection(getSectionName(Section)); |
757 | Array->setAlignment(Align(DL->getTypeStoreSize(Ty).getFixedValue())); |
758 | |
759 | // sancov_pcs parallels the other metadata section(s). Optimizers (e.g. |
760 | // GlobalOpt/ConstantMerge) may not discard sancov_pcs and the other |
761 | // section(s) as a unit, so we conservatively retain all unconditionally in |
762 | // the compiler. |
763 | // |
764 | // With comdat (COFF/ELF), the linker can guarantee the associated sections |
765 | // will be retained or discarded as a unit, so llvm.compiler.used is |
766 | // sufficient. Otherwise, conservatively make all of them retained by the |
767 | // linker. |
768 | if (Array->hasComdat()) |
769 | GlobalsToAppendToCompilerUsed.push_back(Elt: Array); |
770 | else |
771 | GlobalsToAppendToUsed.push_back(Elt: Array); |
772 | |
773 | return Array; |
774 | } |
775 | |
776 | GlobalVariable * |
777 | ModuleSanitizerCoverage::CreatePCArray(Function &F, |
778 | ArrayRef<BasicBlock *> AllBlocks) { |
779 | size_t N = AllBlocks.size(); |
780 | assert(N); |
781 | SmallVector<Constant *, 32> PCs; |
782 | IRBuilder<> IRB(&*F.getEntryBlock().getFirstInsertionPt()); |
783 | for (size_t i = 0; i < N; i++) { |
784 | if (&F.getEntryBlock() == AllBlocks[i]) { |
785 | PCs.push_back(Elt: (Constant *)IRB.CreatePointerCast(V: &F, DestTy: PtrTy)); |
786 | PCs.push_back( |
787 | Elt: (Constant *)IRB.CreateIntToPtr(V: ConstantInt::get(Ty: IntptrTy, V: 1), DestTy: PtrTy)); |
788 | } else { |
789 | PCs.push_back(Elt: (Constant *)IRB.CreatePointerCast( |
790 | V: BlockAddress::get(BB: AllBlocks[i]), DestTy: PtrTy)); |
791 | PCs.push_back(Elt: Constant::getNullValue(Ty: PtrTy)); |
792 | } |
793 | } |
794 | auto *PCArray = |
795 | CreateFunctionLocalArrayInSection(NumElements: N * 2, F, Ty: PtrTy, Section: SanCovPCsSectionName); |
796 | PCArray->setInitializer( |
797 | ConstantArray::get(T: ArrayType::get(ElementType: PtrTy, NumElements: N * 2), V: PCs)); |
798 | PCArray->setConstant(true); |
799 | |
800 | return PCArray; |
801 | } |
802 | |
803 | void ModuleSanitizerCoverage::CreateFunctionLocalArrays( |
804 | Function &F, ArrayRef<BasicBlock *> AllBlocks) { |
805 | if (Options.TracePCGuard) |
806 | FunctionGuardArray = CreateFunctionLocalArrayInSection( |
807 | NumElements: AllBlocks.size(), F, Ty: Int32Ty, Section: SanCovGuardsSectionName); |
808 | |
809 | if (Options.Inline8bitCounters) |
810 | Function8bitCounterArray = CreateFunctionLocalArrayInSection( |
811 | NumElements: AllBlocks.size(), F, Ty: Int8Ty, Section: SanCovCountersSectionName); |
812 | if (Options.InlineBoolFlag) |
813 | FunctionBoolArray = CreateFunctionLocalArrayInSection( |
814 | NumElements: AllBlocks.size(), F, Ty: Int1Ty, Section: SanCovBoolFlagSectionName); |
815 | |
816 | if (Options.PCTable) |
817 | FunctionPCsArray = CreatePCArray(F, AllBlocks); |
818 | } |
819 | |
820 | Value *ModuleSanitizerCoverage::CreateFunctionLocalGateCmp(IRBuilder<> &IRB) { |
821 | auto Load = IRB.CreateLoad(Ty: Int64Ty, Ptr: SanCovCallbackGate); |
822 | Load->setNoSanitizeMetadata(); |
823 | auto Cmp = IRB.CreateIsNotNull(Arg: Load); |
824 | Cmp->setName("sancov gate cmp" ); |
825 | return Cmp; |
826 | } |
827 | |
828 | Instruction *ModuleSanitizerCoverage::CreateGateBranch(Function &F, |
829 | Value *&FunctionGateCmp, |
830 | Instruction *IP) { |
831 | if (!FunctionGateCmp) { |
832 | // Create this in the entry block |
833 | BasicBlock &BB = F.getEntryBlock(); |
834 | BasicBlock::iterator IP = BB.getFirstInsertionPt(); |
835 | IP = PrepareToSplitEntryBlock(BB, IP); |
836 | IRBuilder<> EntryIRB(&*IP); |
837 | FunctionGateCmp = CreateFunctionLocalGateCmp(IRB&: EntryIRB); |
838 | } |
839 | // Set the branch weights to minimize the cost paid when the gate is |
840 | // turned off, so that this instrumentation can be enabled by default |
841 | // with as little performance impact as possible. |
842 | auto Weights = MDBuilder(*C).createBranchWeights(TrueWeight: 1, FalseWeight: 100000); |
843 | return SplitBlockAndInsertIfThen(Cond: FunctionGateCmp, SplitBefore: IP, Unreachable: false, BranchWeights: Weights); |
844 | } |
845 | |
846 | bool ModuleSanitizerCoverage::InjectCoverage(Function &F, |
847 | ArrayRef<BasicBlock *> AllBlocks, |
848 | Value *&FunctionGateCmp, |
849 | bool IsLeafFunc) { |
850 | if (AllBlocks.empty()) |
851 | return false; |
852 | CreateFunctionLocalArrays(F, AllBlocks); |
853 | for (size_t i = 0, N = AllBlocks.size(); i < N; i++) |
854 | InjectCoverageAtBlock(F, BB&: *AllBlocks[i], Idx: i, FunctionGateCmp, IsLeafFunc); |
855 | return true; |
856 | } |
857 | |
858 | // On every indirect call we insert a call to the run-time function |
859 | // __sanitizer_cov_trace_pc_indir with a single parameter: |
860 | //  - the callee address. |
861 | // The address of the caller is passed implicitly via the caller PC, which |
862 | // the callback can recover from its return address. Calls through inline |
863 | // asm are skipped. |
864 | // This requires one of the PC- or counter-based coverage modes (see the assert). |
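// Illustrative sketch (not part of this pass): per the public
// SanitizerCoverage interface the callback takes the callee address as a
// pointer-sized argument. A runtime could record (caller, callee) pairs
// roughly like this; RecordIndirectEdge is a hypothetical helper and the body
// is an assumption, not the compiler-rt implementation.
//
//   extern "C" void __sanitizer_cov_trace_pc_indir(uintptr_t Callee) {
//     uintptr_t Caller = (uintptr_t)__builtin_return_address(0);
//     RecordIndirectEdge(Caller, Callee); // hypothetical recording helper
//   }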
865 | void ModuleSanitizerCoverage::InjectCoverageForIndirectCalls( |
866 | Function &F, ArrayRef<Instruction *> IndirCalls) { |
867 | if (IndirCalls.empty()) |
868 | return; |
869 | assert(Options.TracePC || Options.TracePCGuard || |
870 | Options.Inline8bitCounters || Options.InlineBoolFlag); |
871 | for (auto *I : IndirCalls) { |
872 | InstrumentationIRBuilder IRB(I); |
873 | CallBase &CB = cast<CallBase>(Val&: *I); |
874 | Value *Callee = CB.getCalledOperand(); |
875 | if (isa<InlineAsm>(Val: Callee)) |
876 | continue; |
877 | IRB.CreateCall(Callee: SanCovTracePCIndir, Args: IRB.CreatePointerCast(V: Callee, DestTy: IntptrTy)); |
878 | } |
879 | } |
880 | |
881 | // For every switch statement we insert a call: |
882 | // __sanitizer_cov_trace_switch(CondValue, |
883 | // {NumCases, ValueSizeInBits, Case0Value, Case1Value, Case2Value, ... }) |
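// Illustrative sketch (not part of this pass): the second argument points at
// the constant uint64_t array laid out as described above (case values are
// sorted and zero-extended to 64 bits). A consuming runtime might look like
// the following; the body is an assumption, not the compiler-rt one.
//
//   extern "C" void __sanitizer_cov_trace_switch(uint64_t Val, uint64_t *Cases) {
//     uint64_t NumCases = Cases[0]; // number of case values that follow
//     uint64_t ValBits = Cases[1];  // bit width of the switch condition
//     for (uint64_t i = 0; i < NumCases; i++) {
//       uint64_t CaseVal = Cases[2 + i];
//       (void)ValBits; (void)CaseVal; // e.g. compare against Val and record.
//     }
//   }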
884 | |
885 | void ModuleSanitizerCoverage::InjectTraceForSwitch( |
886 | Function &F, ArrayRef<Instruction *> SwitchTraceTargets, |
887 | Value *&FunctionGateCmp) { |
888 | for (auto *I : SwitchTraceTargets) { |
889 | if (SwitchInst *SI = dyn_cast<SwitchInst>(Val: I)) { |
890 | InstrumentationIRBuilder IRB(I); |
891 | SmallVector<Constant *, 16> Initializers; |
892 | Value *Cond = SI->getCondition(); |
893 | if (Cond->getType()->getScalarSizeInBits() > |
894 | Int64Ty->getScalarSizeInBits()) |
895 | continue; |
896 | Initializers.push_back(Elt: ConstantInt::get(Ty: Int64Ty, V: SI->getNumCases())); |
897 | Initializers.push_back( |
898 | Elt: ConstantInt::get(Ty: Int64Ty, V: Cond->getType()->getScalarSizeInBits())); |
899 | if (Cond->getType()->getScalarSizeInBits() < |
900 | Int64Ty->getScalarSizeInBits()) |
901 | Cond = IRB.CreateIntCast(V: Cond, DestTy: Int64Ty, isSigned: false); |
902 | for (auto It : SI->cases()) { |
903 | ConstantInt *C = It.getCaseValue(); |
904 | if (C->getType()->getScalarSizeInBits() < 64) |
905 | C = ConstantInt::get(Context&: C->getContext(), V: C->getValue().zext(width: 64)); |
906 | Initializers.push_back(Elt: C); |
907 | } |
908 | llvm::sort(C: drop_begin(RangeOrContainer&: Initializers, N: 2), |
909 | Comp: [](const Constant *A, const Constant *B) { |
910 | return cast<ConstantInt>(Val: A)->getLimitedValue() < |
911 | cast<ConstantInt>(Val: B)->getLimitedValue(); |
912 | }); |
913 | ArrayType *ArrayOfInt64Ty = ArrayType::get(ElementType: Int64Ty, NumElements: Initializers.size()); |
914 | GlobalVariable *GV = new GlobalVariable( |
915 | *CurModule, ArrayOfInt64Ty, false, GlobalVariable::InternalLinkage, |
916 | ConstantArray::get(T: ArrayOfInt64Ty, V: Initializers), |
917 | "__sancov_gen_cov_switch_values" ); |
918 | if (Options.GatedCallbacks) { |
919 | auto GateBranch = CreateGateBranch(F, FunctionGateCmp, IP: I); |
920 | IRBuilder<> GateIRB(GateBranch); |
921 | GateIRB.CreateCall(Callee: SanCovTraceSwitchFunction, Args: {Cond, GV}); |
922 | } else { |
923 | IRB.CreateCall(Callee: SanCovTraceSwitchFunction, Args: {Cond, GV}); |
924 | } |
925 | } |
926 | } |
927 | } |
928 | |
929 | void ModuleSanitizerCoverage::InjectTraceForDiv( |
930 | Function &, ArrayRef<BinaryOperator *> DivTraceTargets) { |
931 | for (auto *BO : DivTraceTargets) { |
932 | InstrumentationIRBuilder IRB(BO); |
933 | Value *A1 = BO->getOperand(i_nocapture: 1); |
934 | if (isa<ConstantInt>(Val: A1)) |
935 | continue; |
936 | if (!A1->getType()->isIntegerTy()) |
937 | continue; |
938 | uint64_t TypeSize = DL->getTypeStoreSizeInBits(Ty: A1->getType()); |
939 | int CallbackIdx = TypeSize == 32 ? 0 : TypeSize == 64 ? 1 : -1; |
940 | if (CallbackIdx < 0) |
941 | continue; |
942 | auto Ty = Type::getIntNTy(C&: *C, N: TypeSize); |
943 | IRB.CreateCall(Callee: SanCovTraceDivFunction[CallbackIdx], |
944 | Args: {IRB.CreateIntCast(V: A1, DestTy: Ty, isSigned: true)}); |
945 | } |
946 | } |
947 | |
948 | void ModuleSanitizerCoverage::InjectTraceForGep( |
949 | Function &, ArrayRef<GetElementPtrInst *> GepTraceTargets) { |
950 | for (auto *GEP : GepTraceTargets) { |
951 | InstrumentationIRBuilder IRB(GEP); |
952 | for (Use &Idx : GEP->indices()) |
953 | if (!isa<ConstantInt>(Val: Idx) && Idx->getType()->isIntegerTy()) |
954 | IRB.CreateCall(Callee: SanCovTraceGepFunction, |
955 | Args: {IRB.CreateIntCast(V: Idx, DestTy: IntptrTy, isSigned: true)}); |
956 | } |
957 | } |
958 | |
959 | void ModuleSanitizerCoverage::InjectTraceForLoadsAndStores( |
960 | Function &, ArrayRef<LoadInst *> Loads, ArrayRef<StoreInst *> Stores) { |
961 | auto CallbackIdx = [&](Type *ElementTy) -> int { |
962 | uint64_t TypeSize = DL->getTypeStoreSizeInBits(Ty: ElementTy); |
963 | return TypeSize == 8 ? 0 |
964 | : TypeSize == 16 ? 1 |
965 | : TypeSize == 32 ? 2 |
966 | : TypeSize == 64 ? 3 |
967 | : TypeSize == 128 ? 4 |
968 | : -1; |
969 | }; |
970 | for (auto *LI : Loads) { |
971 | InstrumentationIRBuilder IRB(LI); |
972 | auto Ptr = LI->getPointerOperand(); |
973 | int Idx = CallbackIdx(LI->getType()); |
974 | if (Idx < 0) |
975 | continue; |
976 | IRB.CreateCall(Callee: SanCovLoadFunction[Idx], Args: Ptr); |
977 | } |
978 | for (auto *SI : Stores) { |
979 | InstrumentationIRBuilder IRB(SI); |
980 | auto Ptr = SI->getPointerOperand(); |
981 | int Idx = CallbackIdx(SI->getValueOperand()->getType()); |
982 | if (Idx < 0) |
983 | continue; |
984 | IRB.CreateCall(Callee: SanCovStoreFunction[Idx], Args: Ptr); |
985 | } |
986 | } |
987 | |
988 | void ModuleSanitizerCoverage::InjectTraceForCmp( |
989 | Function &F, ArrayRef<Instruction *> CmpTraceTargets, |
990 | Value *&FunctionGateCmp) { |
991 | for (auto *I : CmpTraceTargets) { |
992 | if (ICmpInst *ICMP = dyn_cast<ICmpInst>(Val: I)) { |
993 | InstrumentationIRBuilder IRB(ICMP); |
994 | Value *A0 = ICMP->getOperand(i_nocapture: 0); |
995 | Value *A1 = ICMP->getOperand(i_nocapture: 1); |
996 | if (!A0->getType()->isIntegerTy()) |
997 | continue; |
998 | uint64_t TypeSize = DL->getTypeStoreSizeInBits(Ty: A0->getType()); |
999 | int CallbackIdx = TypeSize == 8 ? 0 |
1000 | : TypeSize == 16 ? 1 |
1001 | : TypeSize == 32 ? 2 |
1002 | : TypeSize == 64 ? 3 |
1003 | : -1; |
1004 | if (CallbackIdx < 0) |
1005 | continue; |
1006 | // __sanitizer_cov_trace_cmp{1,2,4,8}(A0, A1), or a _const_ variant (see below). |
1007 | auto CallbackFunc = SanCovTraceCmpFunction[CallbackIdx]; |
1008 | bool FirstIsConst = isa<ConstantInt>(Val: A0); |
1009 | bool SecondIsConst = isa<ConstantInt>(Val: A1); |
1010 | // If both are const, then we don't need such a comparison. |
1011 | if (FirstIsConst && SecondIsConst) |
1012 | continue; |
1013 | // If only one is const, then make it the first callback argument. |
1014 | if (FirstIsConst || SecondIsConst) { |
1015 | CallbackFunc = SanCovTraceConstCmpFunction[CallbackIdx]; |
1016 | if (SecondIsConst) |
1017 | std::swap(a&: A0, b&: A1); |
1018 | } |
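// E.g. for a 32-bit "x == 42", the emitted call is
// __sanitizer_cov_trace_const_cmp4(42, x).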
1019 | |
1020 | auto Ty = Type::getIntNTy(C&: *C, N: TypeSize); |
1021 | if (Options.GatedCallbacks) { |
1022 | auto GateBranch = CreateGateBranch(F, FunctionGateCmp, IP: I); |
1023 | IRBuilder<> GateIRB(GateBranch); |
1024 | GateIRB.CreateCall(Callee: CallbackFunc, Args: {GateIRB.CreateIntCast(V: A0, DestTy: Ty, isSigned: true), |
1025 | GateIRB.CreateIntCast(V: A1, DestTy: Ty, isSigned: true)}); |
1026 | } else { |
1027 | IRB.CreateCall(Callee: CallbackFunc, Args: {IRB.CreateIntCast(V: A0, DestTy: Ty, isSigned: true), |
1028 | IRB.CreateIntCast(V: A1, DestTy: Ty, isSigned: true)}); |
1029 | } |
1030 | } |
1031 | } |
1032 | } |
1033 | |
1034 | void ModuleSanitizerCoverage::InjectCoverageAtBlock(Function &F, BasicBlock &BB, |
1035 | size_t Idx, |
1036 | Value *&FunctionGateCmp, |
1037 | bool IsLeafFunc) { |
1038 | BasicBlock::iterator IP = BB.getFirstInsertionPt(); |
1039 | bool IsEntryBB = &BB == &F.getEntryBlock(); |
1040 | DebugLoc EntryLoc; |
1041 | if (IsEntryBB) { |
1042 | if (auto SP = F.getSubprogram()) |
1043 | EntryLoc = DILocation::get(Context&: SP->getContext(), Line: SP->getScopeLine(), Column: 0, Scope: SP); |
1044 | // Keep static allocas and llvm.localescape calls in the entry block. Even |
1045 | // if we aren't splitting the block, it's nice for allocas to be before |
1046 | // calls. |
1047 | IP = PrepareToSplitEntryBlock(BB, IP); |
1048 | } |
1049 | |
1050 | InstrumentationIRBuilder IRB(&*IP); |
1051 | if (EntryLoc) |
1052 | IRB.SetCurrentDebugLocation(EntryLoc); |
1053 | if (Options.TracePC) { |
1054 | IRB.CreateCall(Callee: SanCovTracePC) |
1055 | ->setCannotMerge(); // gets the PC using GET_CALLER_PC. |
1056 | } |
1057 | if (Options.TracePCGuard) { |
1058 | auto GuardPtr = IRB.CreateConstInBoundsGEP2_64( |
1059 | Ty: FunctionGuardArray->getValueType(), Ptr: FunctionGuardArray, Idx0: 0, Idx1: Idx); |
1060 | if (Options.GatedCallbacks) { |
1061 | Instruction *I = &*IP; |
1062 | auto GateBranch = CreateGateBranch(F, FunctionGateCmp, IP: I); |
1063 | IRBuilder<> GateIRB(GateBranch); |
1064 | GateIRB.CreateCall(Callee: SanCovTracePCGuard, Args: GuardPtr)->setCannotMerge(); |
1065 | } else { |
1066 | IRB.CreateCall(Callee: SanCovTracePCGuard, Args: GuardPtr)->setCannotMerge(); |
1067 | } |
1068 | } |
1069 | if (Options.Inline8bitCounters) { |
1070 | auto CounterPtr = IRB.CreateGEP( |
1071 | Ty: Function8bitCounterArray->getValueType(), Ptr: Function8bitCounterArray, |
1072 | IdxList: {ConstantInt::get(Ty: IntptrTy, V: 0), ConstantInt::get(Ty: IntptrTy, V: Idx)}); |
1073 | auto Load = IRB.CreateLoad(Ty: Int8Ty, Ptr: CounterPtr); |
1074 | auto Inc = IRB.CreateAdd(LHS: Load, RHS: ConstantInt::get(Ty: Int8Ty, V: 1)); |
1075 | auto Store = IRB.CreateStore(Val: Inc, Ptr: CounterPtr); |
1076 | Load->setNoSanitizeMetadata(); |
1077 | Store->setNoSanitizeMetadata(); |
1078 | } |
1079 | if (Options.InlineBoolFlag) { |
1080 | auto FlagPtr = IRB.CreateGEP( |
1081 | Ty: FunctionBoolArray->getValueType(), Ptr: FunctionBoolArray, |
1082 | IdxList: {ConstantInt::get(Ty: IntptrTy, V: 0), ConstantInt::get(Ty: IntptrTy, V: Idx)}); |
1083 | auto Load = IRB.CreateLoad(Ty: Int1Ty, Ptr: FlagPtr); |
1084 | auto ThenTerm = SplitBlockAndInsertIfThen( |
1085 | Cond: IRB.CreateIsNull(Arg: Load), SplitBefore: &*IP, Unreachable: false, |
1086 | BranchWeights: MDBuilder(IRB.getContext()).createUnlikelyBranchWeights()); |
1087 | IRBuilder<> ThenIRB(ThenTerm); |
1088 | auto Store = ThenIRB.CreateStore(Val: ConstantInt::getTrue(Ty: Int1Ty), Ptr: FlagPtr); |
1089 | Load->setNoSanitizeMetadata(); |
1090 | Store->setNoSanitizeMetadata(); |
1091 | } |
1092 | if (Options.StackDepth && IsEntryBB && !IsLeafFunc) { |
1093 | Module *M = F.getParent(); |
1094 | const DataLayout &DL = M->getDataLayout(); |
1095 | |
1096 | if (Options.StackDepthCallbackMin) { |
1097 | // In callback mode, only add call when stack depth reaches minimum. |
1098 | int EstimatedStackSize = 0; |
1099 | // If dynamic alloca found, always add call. |
1100 | bool HasDynamicAlloc = false; |
1101 | // Find an insertion point after the last alloca. |
1102 | llvm::Instruction *InsertBefore = nullptr; |
1103 | |
1104 | // Examine all allocas in the basic block. Since we're too early |
1105 | // to have results from Intrinsic::frameaddress, we have to manually |
1106 | // estimate the stack size. |
1107 | for (auto &I : BB) { |
1108 | if (auto *AI = dyn_cast<AllocaInst>(Val: &I)) { |
1109 | // Move potential insertion point past the "alloca". |
1110 | InsertBefore = AI->getNextNode(); |
1111 | |
1112 | // Make an estimate on the stack usage. |
1113 | if (AI->isStaticAlloca()) { |
1114 | uint32_t Bytes = DL.getTypeAllocSize(Ty: AI->getAllocatedType()); |
1115 | if (AI->isArrayAllocation()) { |
1116 | if (const ConstantInt *arraySize = |
1117 | dyn_cast<ConstantInt>(Val: AI->getArraySize())) { |
1118 | Bytes *= arraySize->getZExtValue(); |
1119 | } else { |
1120 | HasDynamicAlloc = true; |
1121 | } |
1122 | } |
1123 | EstimatedStackSize += Bytes; |
1124 | } else { |
1125 | HasDynamicAlloc = true; |
1126 | } |
1127 | } |
1128 | } |
1129 | |
1130 | if (HasDynamicAlloc || |
1131 | EstimatedStackSize >= Options.StackDepthCallbackMin) { |
1132 | if (InsertBefore) |
1133 | IRB.SetInsertPoint(InsertBefore); |
1134 | IRB.CreateCall(Callee: SanCovStackDepthCallback)->setCannotMerge(); |
1135 | } |
1136 | } else { |
1137 | // Check stack depth. If it's the deepest so far, record it. |
1138 | auto FrameAddrPtr = IRB.CreateIntrinsic( |
1139 | ID: Intrinsic::frameaddress, Types: IRB.getPtrTy(AddrSpace: DL.getAllocaAddrSpace()), |
1140 | Args: {Constant::getNullValue(Ty: Int32Ty)}); |
1141 | auto FrameAddrInt = IRB.CreatePtrToInt(V: FrameAddrPtr, DestTy: IntptrTy); |
1142 | auto LowestStack = IRB.CreateLoad(Ty: IntptrTy, Ptr: SanCovLowestStack); |
1143 | auto IsStackLower = IRB.CreateICmpULT(LHS: FrameAddrInt, RHS: LowestStack); |
1144 | auto ThenTerm = SplitBlockAndInsertIfThen( |
1145 | Cond: IsStackLower, SplitBefore: &*IP, Unreachable: false, |
1146 | BranchWeights: MDBuilder(IRB.getContext()).createUnlikelyBranchWeights()); |
1147 | IRBuilder<> ThenIRB(ThenTerm); |
1148 | auto Store = ThenIRB.CreateStore(Val: FrameAddrInt, Ptr: SanCovLowestStack); |
1149 | LowestStack->setNoSanitizeMetadata(); |
1150 | Store->setNoSanitizeMetadata(); |
1151 | } |
1152 | } |
1153 | } |
1154 | |
1155 | std::string |
1156 | ModuleSanitizerCoverage::getSectionName(const std::string &Section) const { |
1157 | if (TargetTriple.isOSBinFormatCOFF()) { |
1158 | if (Section == SanCovCountersSectionName) |
1159 | return ".SCOV$CM" ; |
1160 | if (Section == SanCovBoolFlagSectionName) |
1161 | return ".SCOV$BM" ; |
1162 | if (Section == SanCovPCsSectionName) |
1163 | return ".SCOVP$M" ; |
1164 | return ".SCOV$GM" ; // For SanCovGuardsSectionName. |
1165 | } |
1166 | if (TargetTriple.isOSBinFormatMachO()) |
1167 | return "__DATA,__" + Section; |
1168 | return "__" + Section; |
1169 | } |
1170 | |
1171 | std::string |
1172 | ModuleSanitizerCoverage::getSectionStart(const std::string &Section) const { |
1173 | if (TargetTriple.isOSBinFormatMachO()) |
1174 | return "\1section$start$__DATA$__" + Section; |
1175 | return "__start___" + Section; |
1176 | } |
1177 | |
1178 | std::string |
1179 | ModuleSanitizerCoverage::getSectionEnd(const std::string &Section) const { |
1180 | if (TargetTriple.isOSBinFormatMachO()) |
1181 | return "\1section$end$__DATA$__" + Section; |
1182 | return "__stop___" + Section; |
1183 | } |
1184 | |
1185 | void ModuleSanitizerCoverage::createFunctionControlFlow(Function &F) { |
1186 | SmallVector<Constant *, 32> CFs; |
1187 | IRBuilder<> IRB(&*F.getEntryBlock().getFirstInsertionPt()); |
1188 | |
1189 | for (auto &BB : F) { |
1190 | // blockaddress cannot be used on a function's entry block. |
1191 | if (&BB == &F.getEntryBlock()) |
1192 | CFs.push_back(Elt: (Constant *)IRB.CreatePointerCast(V: &F, DestTy: PtrTy)); |
1193 | else |
1194 | CFs.push_back( |
1195 | Elt: (Constant *)IRB.CreatePointerCast(V: BlockAddress::get(BB: &BB), DestTy: PtrTy)); |
1196 | |
1197 | for (auto SuccBB : successors(BB: &BB)) { |
1198 | assert(SuccBB != &F.getEntryBlock()); |
1199 | CFs.push_back( |
1200 | Elt: (Constant *)IRB.CreatePointerCast(V: BlockAddress::get(BB: SuccBB), DestTy: PtrTy)); |
1201 | } |
1202 | |
1203 | CFs.push_back(Elt: (Constant *)Constant::getNullValue(Ty: PtrTy)); |
1204 | |
1205 | for (auto &Inst : BB) { |
1206 | if (CallBase *CB = dyn_cast<CallBase>(Val: &Inst)) { |
1207 | if (CB->isIndirectCall()) { |
1208 | // TODO(navidem): handle indirect calls, for now mark its existence. |
1209 | CFs.push_back(Elt: (Constant *)IRB.CreateIntToPtr( |
1210 | V: ConstantInt::get(Ty: IntptrTy, V: -1), DestTy: PtrTy)); |
1211 | } else { |
1212 | auto CalledF = CB->getCalledFunction(); |
1213 | if (CalledF && !CalledF->isIntrinsic()) |
1214 | CFs.push_back(Elt: (Constant *)IRB.CreatePointerCast(V: CalledF, DestTy: PtrTy)); |
1215 | } |
1216 | } |
1217 | } |
1218 | |
1219 | CFs.push_back(Elt: (Constant *)Constant::getNullValue(Ty: PtrTy)); |
1220 | } |
1221 | |
1222 | FunctionCFsArray = CreateFunctionLocalArrayInSection(NumElements: CFs.size(), F, Ty: PtrTy, |
1223 | Section: SanCovCFsSectionName); |
1224 | FunctionCFsArray->setInitializer( |
1225 | ConstantArray::get(T: ArrayType::get(ElementType: PtrTy, NumElements: CFs.size()), V: CFs)); |
1226 | FunctionCFsArray->setConstant(true); |
1227 | } |
1228 | |