//===- ModuleInliner.cpp - Code related to module inliner -----------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the mechanics required to implement inlining without
// missing any calls at the module level. Unlike the SCC inliner, it does not
// need any information about the SCC or the call graph. The decisions about
// which calls are profitable to inline are implemented elsewhere.
//
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/IPO/ModuleInliner.h"
#include "llvm/ADT/ScopeExit.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/BlockFrequencyInfo.h"
#include "llvm/Analysis/CtxProfAnalysis.h"
#include "llvm/Analysis/InlineAdvisor.h"
#include "llvm/Analysis/InlineCost.h"
#include "llvm/Analysis/InlineOrder.h"
#include "llvm/Analysis/OptimizationRemarkEmitter.h"
#include "llvm/Analysis/ProfileSummaryInfo.h"
#include "llvm/Analysis/ReplayInlineAdvisor.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/IR/DiagnosticInfo.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/PassManager.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Utils/CallPromotionUtils.h"
#include "llvm/Transforms/Utils/Cloning.h"
#include <cassert>

using namespace llvm;

#define DEBUG_TYPE "module-inline"

STATISTIC(NumInlined, "Number of functions inlined");
STATISTIC(NumDeleted,
          "Number of functions deleted because all callers were inlined");

static cl::opt<bool> CtxProfPromoteAlwaysInline(
    "ctx-prof-promote-alwaysinline", cl::init(false), cl::Hidden,
    cl::desc("If using a contextual profile in this module, and an indirect "
             "call target is marked as alwaysinline, perform indirect call "
             "promotion for that target. If multiple targets for an indirect "
             "call site fit this description, they are all promoted."));

/// Return true if the specified inline history ID
/// indicates an inline history that includes the specified function.
static bool inlineHistoryIncludes(
    Function *F, int InlineHistoryID,
    const SmallVectorImpl<std::pair<Function *, int>> &InlineHistory) {
  while (InlineHistoryID != -1) {
    assert(unsigned(InlineHistoryID) < InlineHistory.size() &&
           "Invalid inline history ID");
    if (InlineHistory[InlineHistoryID].first == F)
      return true;
    InlineHistoryID = InlineHistory[InlineHistoryID].second;
  }
  return false;
}

InlineAdvisor &ModuleInlinerPass::getAdvisor(const ModuleAnalysisManager &MAM,
                                             FunctionAnalysisManager &FAM,
                                             Module &M) {
  if (OwnedAdvisor)
    return *OwnedAdvisor;

  auto *IAA = MAM.getCachedResult<InlineAdvisorAnalysis>(M);
  if (!IAA) {
    // It should still be possible to run the inliner as a stand-alone module
    // pass, for test scenarios. In that case, we default to the
    // DefaultInlineAdvisor, which doesn't need to keep state between module
    // pass runs. It also uses just the default InlineParams. In this case, we
    // need to use the provided FAM, which is valid for the duration of the
    // inliner pass, and thus the lifetime of the owned advisor. The one we
    // would get from the MAM can be invalidated as a result of the inliner's
    // activity.
    OwnedAdvisor = std::make_unique<DefaultInlineAdvisor>(
        M, FAM, Params, InlineContext{LTOPhase, InlinePass::ModuleInliner});

    return *OwnedAdvisor;
  }
  assert(IAA->getAdvisor() &&
         "Expected a present InlineAdvisorAnalysis to also have an "
         "InlineAdvisor initialized");
  return *IAA->getAdvisor();
}

static bool isKnownLibFunction(Function &F, TargetLibraryInfo &TLI) {
  LibFunc LF;

  // Either this is a normal library function or a "vectorizable"
  // function. Not using the VFDatabase here because this query
  // is related only to libraries handled via the TLI.
  return TLI.getLibFunc(F, LF) ||
         TLI.isKnownVectorFunctionInLibrary(F.getName());
}

PreservedAnalyses ModuleInlinerPass::run(Module &M,
                                         ModuleAnalysisManager &MAM) {
  LLVM_DEBUG(dbgs() << "---- Module Inliner is Running ---- \n");

  auto &IAA = MAM.getResult<InlineAdvisorAnalysis>(M);
  if (!IAA.tryCreate(Params, Mode, {},
                     InlineContext{LTOPhase, InlinePass::ModuleInliner})) {
    M.getContext().emitError(
        "Could not setup Inlining Advisor for the requested "
        "mode and/or options");
    return PreservedAnalyses::all();
  }

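  // The contextual profile, if this module has one, is consulted below when
  // collecting indirect call promotion candidates and is passed down to
  // InlineFunction so the profile can be kept in sync as calls get inlined.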
  auto &CtxProf = MAM.getResult<CtxProfAnalysis>(M);

  bool Changed = false;

  ProfileSummaryInfo *PSI = MAM.getCachedResult<ProfileSummaryAnalysis>(M);

  FunctionAnalysisManager &FAM =
      MAM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();

  auto GetTLI = [&FAM](Function &F) -> TargetLibraryInfo & {
    return FAM.getResult<TargetLibraryAnalysis>(F);
  };

  InlineAdvisor &Advisor = getAdvisor(MAM, FAM, M);
  Advisor.onPassEntry();

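  // Make sure the advisor is told the pass is done, whether we fall off the
  // end of run() or take one of the early returns below.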
  auto AdvisorOnExit = make_scope_exit([&] { Advisor.onPassExit(); });

  // In the module inliner, a priority-based worklist is used for call sites
  // across the entire module. The inline order is therefore not limited to
  // bottom-up SCC order, enabling a more globally scoped ordering, and the
  // inline deferral logic becomes unnecessary. Other priority heuristics,
  // e.g. profile-based ones, are also possible.
  //
  // TODO: There is a large amount of code duplicated between the module
  // inliner and the SCC inliner, which needs refactoring.
  auto Calls = getInlineOrder(FAM, Params, MAM, M);
  assert(Calls != nullptr && "Expected an initialized InlineOrder");

  // Populate the initial list of calls in this module.
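  // (Call site, promotion target) pairs discovered from the contextual
  // profile while scanning the module; they are promoted to direct calls
  // once the scan is finished.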
  SetVector<std::pair<CallBase *, Function *>> ICPCandidates;
  for (Function &F : M) {
    auto &ORE = FAM.getResult<OptimizationRemarkEmitterAnalysis>(F);
    for (Instruction &I : instructions(F)) {
      if (auto *CB = dyn_cast<CallBase>(&I)) {
        if (Function *Callee = CB->getCalledFunction()) {
          if (!Callee->isDeclaration())
            Calls->push({CB, -1});
          else if (!isa<IntrinsicInst>(I)) {
            using namespace ore;
            setInlineRemark(*CB, "unavailable definition");
            ORE.emit([&]() {
              return OptimizationRemarkMissed(DEBUG_TYPE, "NoDefinition", &I)
                     << NV("Callee", Callee) << " will not be inlined into "
                     << NV("Caller", CB->getCaller())
                     << " because its definition is unavailable"
                     << setIsVerbose();
            });
          }
        } else if (CtxProfPromoteAlwaysInline &&
                   CtxProf.isInSpecializedModule() && CB->isIndirectCall()) {
          CtxProfAnalysis::collectIndirectCallPromotionList(*CB, CtxProf,
                                                            ICPCandidates);
        }
      }
    }
  }
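  // Promote the collected indirect call sites to guarded direct calls and
  // queue the resulting direct calls for inlining consideration. The -1
  // history ID marks them as original call sites, not produced by inlining.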
  for (auto &[CB, Target] : ICPCandidates) {
    if (auto *DirectCB = promoteCallWithIfThenElse(*CB, *Target, CtxProf))
      Calls->push({DirectCB, -1});
  }
  if (Calls->empty())
    return PreservedAnalyses::all();

  // When inlining a callee produces new call sites, we want to keep track of
  // the fact that they were inlined from the callee. This allows us to avoid
  // infinite inlining in some obscure cases. To represent this, we use an
  // index into the InlineHistory vector.
  SmallVector<std::pair<Function *, int>, 16> InlineHistory;

  // Track the dead functions to delete once finished with inlining calls. We
  // defer deleting these to make it easier to handle the call graph updates.
  SmallVector<Function *, 4> DeadFunctions;

  // Loop forward over all of the calls.
  while (!Calls->empty()) {
    auto P = Calls->pop();
    CallBase *CB = P.first;
    const int InlineHistoryID = P.second;
    Function &F = *CB->getCaller();
    Function &Callee = *CB->getCalledFunction();

    LLVM_DEBUG(dbgs() << "Inlining calls in: " << F.getName() << "\n"
                      << " Function size: " << F.getInstructionCount()
                      << "\n");
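    // F is only referenced by the debug output above; the cast below avoids
    // an unused-variable warning in builds where LLVM_DEBUG compiles away.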
    (void)F;

    auto GetAssumptionCache = [&](Function &F) -> AssumptionCache & {
      return FAM.getResult<AssumptionAnalysis>(F);
    };

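    // Skip this call site if the callee already appears in its inline
    // history; repeatedly inlining through a (mutually) recursive cycle
    // could otherwise go on indefinitely.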
    if (InlineHistoryID != -1 &&
        inlineHistoryIncludes(&Callee, InlineHistoryID, InlineHistory)) {
      setInlineRemark(*CB, "recursive");
      continue;
    }

    auto Advice = Advisor.getAdvice(*CB, /*MandatoryOnly=*/false);
    // Check whether we want to inline this callsite.
    if (!Advice->isInliningRecommended()) {
      Advice->recordUnattemptedInlining();
      continue;
    }

    // Set up the data structure used to plumb customization into the
    // `InlineFunction` routine.
    InlineFunctionInfo IFI(
        GetAssumptionCache, PSI,
        &FAM.getResult<BlockFrequencyAnalysis>(*(CB->getCaller())),
        &FAM.getResult<BlockFrequencyAnalysis>(Callee));

    InlineResult IR =
        InlineFunction(*CB, IFI, CtxProf, /*MergeAttributes=*/true,
                       &FAM.getResult<AAManager>(*CB->getCaller()));
    if (!IR.isSuccess()) {
      Advice->recordUnsuccessfulInlining(IR);
      continue;
    }

    Changed = true;
    ++NumInlined;

    LLVM_DEBUG(dbgs() << " Size after inlining: " << F.getInstructionCount()
                      << "\n");

    // Add any new callsites to defined functions to the worklist.
    if (!IFI.InlinedCallSites.empty()) {
      int NewHistoryID = InlineHistory.size();
      InlineHistory.push_back({&Callee, InlineHistoryID});

      for (CallBase *ICB : reverse(IFI.InlinedCallSites)) {
        Function *NewCallee = ICB->getCalledFunction();
        if (!NewCallee) {
          // Try to promote an indirect (virtual) call without waiting for
          // the post-inline cleanup and the next DevirtSCCRepeatedPass
          // iteration because the next iteration may not happen and we may
          // miss inlining it.
          // FIXME: enable for ctxprof.
          if (!CtxProf.isInSpecializedModule())
            if (tryPromoteCall(*ICB))
              NewCallee = ICB->getCalledFunction();
        }
        if (NewCallee)
          if (!NewCallee->isDeclaration())
            Calls->push({ICB, NewHistoryID});
      }
    }

    // For local functions, check whether this makes the callee trivially
    // dead. In that case, we can drop the body of the function eagerly
    // which may reduce the number of callers of other functions to one,
    // changing inline cost thresholds.
    bool CalleeWasDeleted = false;
    if (Callee.hasLocalLinkage()) {
      // To check this we also need to nuke any dead constant uses (perhaps
      // made dead by this operation on other functions).
      Callee.removeDeadConstantUsers();
      if (Callee.use_empty() && !isKnownLibFunction(Callee, GetTLI(Callee))) {
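        // Any call sites still in the worklist whose caller is this callee
        // are about to lose their enclosing function body; drop them.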
        Calls->erase_if([&](const std::pair<CallBase *, int> &Call) {
          return Call.first->getCaller() == &Callee;
        });
        // Clear the body and queue the function itself for deletion when we
        // finish inlining.
        // Note that after this point, it is an error to do anything other
        // than use the callee's address or delete it.
        Callee.dropAllReferences();
        assert(!is_contained(DeadFunctions, &Callee) &&
               "Cannot cause a function to become dead twice!");
        DeadFunctions.push_back(&Callee);
        CalleeWasDeleted = true;
      }
    }
    if (CalleeWasDeleted)
      Advice->recordInliningWithCalleeDeleted();
    else
      Advice->recordInlining();
  }

  // Now that we've finished inlining all of the calls across this module,
  // delete all of the trivially dead functions.
  //
  // Note that the order in which the dead functions are removed does not
  // matter here: all we do is clear cached analyses and erase each function
  // from the module.
  for (Function *DeadF : DeadFunctions) {
    // Clear out any cached analyses.
    FAM.clear(*DeadF, DeadF->getName());

    // And delete the actual function from the module.
    M.getFunctionList().erase(DeadF);

    ++NumDeleted;
  }

  if (!Changed)
    return PreservedAnalyses::all();

  return PreservedAnalyses::none();
}