//===- VarLocBasedImpl.cpp - Tracking Debug Value MIs with VarLoc class----===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
///
/// \file VarLocBasedImpl.cpp
///
/// LiveDebugValues is an optimistic "available expressions" dataflow
/// algorithm. The set of expressions is the set of machine locations
/// (registers, spill slots, constants, and target indices) in which a variable
/// fragment might be located, qualified by a DIExpression and indirect-ness
/// flag, while each variable is identified by a DebugVariable object. The
/// availability of an expression begins when a DBG_VALUE instruction specifies
/// the location of a DebugVariable, and continues until that location is
/// clobbered or re-specified by a different DBG_VALUE for the same
/// DebugVariable.
///
/// The output of LiveDebugValues is additional DBG_VALUE instructions,
/// placed to extend variable locations as far as they're available. This file
/// and the VarLocBasedLDV class are an implementation that explicitly tracks
/// locations, using the VarLoc class.
///
/// The canonical "available expressions" problem doesn't have expression
/// clobbering; instead, when a variable is re-assigned, any expressions using
/// that variable get invalidated. LiveDebugValues can map onto "available
/// expressions" by having every register represented by a variable, which is
/// used in an expression that becomes available at a DBG_VALUE instruction.
/// When the register is clobbered, its variable is effectively reassigned, and
/// expressions computed from it become unavailable. A similar construct is
/// needed when a DebugVariable has its location re-specified, to invalidate
/// all other locations for that DebugVariable.
///
/// Using the dataflow analysis to compute the available expressions, we create
/// a DBG_VALUE at the beginning of each block where the expression is
/// live-in. This propagates variable locations into every basic block where
/// the location can be determined, rather than only having DBG_VALUEs in blocks
/// where locations are specified due to an assignment or some optimization.
/// Movements of values between registers and spill slots are annotated with
/// DBG_VALUEs too, to track variable values between locations. All this allows
/// DbgEntityHistoryCalculator to focus on only the locations within individual
/// blocks, facilitating testing and improving modularity.
///
/// We follow an optimistic dataflow approach, with this lattice:
///
/// \verbatim
///            ┬ "Unknown"
///            |
///            v
///           True
///            |
///            v
///            ⊥ False
/// \endverbatim With "True" signifying that the expression is available (and
/// thus a DebugVariable's location is the corresponding register), while
/// "False" signifies that the expression is unavailable. "Unknown"s never
/// survive to the end of the analysis (see below).
///
/// Formally, all DebugVariable locations that are live-out of a block are
/// initialized to \top. A block's live-in values take the meet of the lattice
/// values of every predecessor's live-outs, except for the entry block, where
/// all live-ins are \bot. The usual dataflow propagation occurs: the transfer
/// function for a block assigns an expression for a DebugVariable to be "True"
/// if a DBG_VALUE in the block specifies it; "False" if the location is
/// clobbered; or the live-in value if it is unaffected by the block. We
/// visit each block in reverse post order until a fixed point is reached. The
/// solution produced is maximal.
///
/// Intuitively, we start by assuming that every expression / variable location
/// is at least "True", and then propagate "False" from the entry block and any
/// clobbers until there are no more changes to make. This gives us an accurate
/// solution because all incorrect locations will have a "False" propagated into
/// them. It also gives us a solution that copes well with loops by assuming
/// that variable locations are live-through every loop, and then removing,
/// through dataflow, those that are not.
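///
/// As an illustration only (not code from this pass), the meet over that
/// lattice behaves like:
///
/// \verbatim
///   meet(Unknown, x)   = x
///   meet(x, Unknown)   = x
///   meet(True, True)   = True
///   meet(False, _)     = False   (and symmetrically)
/// \endverbatim
/// so a location is live-in to a block only if every already-visited
/// predecessor agrees that it is available.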
///
/// Within LiveDebugValues: each variable location is represented by a
/// VarLoc object that identifies the source variable, the set of
/// machine-locations that currently describe it (a single location for
/// DBG_VALUE or multiple for DBG_VALUE_LIST), and the DBG_VALUE inst that
/// specifies the location. Each VarLoc is indexed in the (function-scope) \p
/// VarLocMap, giving each VarLoc a set of unique indexes, each of which
/// corresponds to one of the VarLoc's machine-locations and can be used to
/// look up the VarLoc in the VarLocMap. Rather than operate directly on machine
/// locations, the dataflow analysis in this pass identifies locations by their
/// indices in the VarLocMap, meaning all the variable locations in a block can
/// be described by a sparse vector of VarLocMap indices.
///
/// All the storage for the dataflow analysis is local to the ExtendRanges
/// method and passed down to helper methods. "OutLocs" and "InLocs" record the
/// in and out lattice values for each block. "OpenRanges" maintains a list of
/// variable locations and, with the "process" method, evaluates the transfer
/// function of each block. "flushPendingLocs" installs debug value instructions
/// for each live-in location at the start of blocks, while "Transfers" records
/// transfers of values between machine-locations.
///
/// We avoid explicitly representing the "Unknown" (\top) lattice value in the
/// implementation. Instead, unvisited blocks implicitly have all lattice
/// values set as "Unknown". After being visited, there will be a path back to
/// the entry block where the lattice value is "False", and as the transfer
/// function cannot make new "Unknown" locations, there are no scenarios where
/// a block can have an "Unknown" location after being visited. Similarly, we
/// don't enumerate all possible variable locations before exploring the
/// function: when a new location is discovered, all blocks previously explored
/// were implicitly "False" but unrecorded, and become explicitly "False" when
/// a new VarLoc is created with its bit not set in predecessor InLocs or
/// OutLocs.
///
//===----------------------------------------------------------------------===//

#include "LiveDebugValues.h"

#include "llvm/ADT/CoalescingBitVector.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/PostOrderIterator.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/BinaryFormat/Dwarf.h"
#include "llvm/CodeGen/LexicalScopes.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineMemOperand.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/PseudoSourceValue.h"
#include "llvm/CodeGen/TargetFrameLowering.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetLowering.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/Config/llvm-config.h"
#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/IR/DebugLoc.h"
#include "llvm/IR/Function.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/TypeSize.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetMachine.h"
#include <cassert>
#include <cstdint>
#include <functional>
#include <map>
#include <optional>
#include <queue>
#include <tuple>
#include <utility>
#include <vector>

using namespace llvm;

#define DEBUG_TYPE "livedebugvalues"

STATISTIC(NumInserted, "Number of DBG_VALUE instructions inserted");

/// Return true if \p Op is a register other than the stack pointer or the
/// frame pointer. This is used to avoid basing the debug entry values on
/// the registers, since we do not support it at the moment.
static bool isRegOtherThanSPAndFP(const MachineOperand &Op,
                                  const MachineInstr &MI,
                                  const TargetRegisterInfo *TRI) {
  if (!Op.isReg())
    return false;

  const MachineFunction *MF = MI.getParent()->getParent();
  const TargetLowering *TLI = MF->getSubtarget().getTargetLowering();
  Register SP = TLI->getStackPointerRegisterToSaveRestore();
  Register FP = TRI->getFrameRegister(*MF);
  Register Reg = Op.getReg();

  return Reg && Reg != SP && Reg != FP;
}

namespace {

// Max out the number of statically allocated elements in DefinedRegsSet, as
// this prevents fallback to std::set::count() operations.
using DefinedRegsSet = SmallSet<Register, 32>;

// The IDs in this set correspond to MachineLocs in VarLocs, as well as VarLocs
// that represent Entry Values; every VarLoc in the set will also appear
// exactly once at Location=0.
// As a result, each VarLoc may appear more than once in this "set", but each
// range corresponding to a Reg, SpillLoc, or EntryValue type will still be a
// "true" set (i.e. each VarLoc may appear only once), and the range Location=0
// is the set of all VarLocs.
using VarLocSet = CoalescingBitVector<uint64_t>;

/// A type-checked pair of {Register Location (or 0), Index}, used to index
/// into a \ref VarLocMap. This can be efficiently converted to a 64-bit int
/// for insertion into a \ref VarLocSet, and efficiently converted back. The
/// type-checker helps ensure that the conversions aren't lossy.
///
/// Why encode a location /into/ the VarLocMap index? This makes it possible
/// to find the open VarLocs killed by a register def very quickly. This is a
/// performance-critical operation for LiveDebugValues.
struct LocIndex {
  using u32_location_t = uint32_t;
  using u32_index_t = uint32_t;

  u32_location_t Location; // Physical registers live in the range [1;2^30) (see
                           // \ref MCRegister), so we have plenty of range left
                           // here to encode non-register locations.
  u32_index_t Index;

  /// The location that has an entry for every VarLoc in the map.
  static constexpr u32_location_t kUniversalLocation = 0;

  /// The first location that is reserved for VarLocs with locations of kind
  /// RegisterKind.
  static constexpr u32_location_t kFirstRegLocation = 1;

  /// The first location greater than 0 that is not reserved for VarLocs with
  /// locations of kind RegisterKind.
  static constexpr u32_location_t kFirstInvalidRegLocation = 1 << 30;

  /// A special location reserved for VarLocs with locations of kind
  /// SpillLocKind.
  static constexpr u32_location_t kSpillLocation = kFirstInvalidRegLocation;

  /// A special location reserved for VarLocs of kind EntryValueBackupKind and
  /// EntryValueCopyBackupKind.
  static constexpr u32_location_t kEntryValueBackupLocation =
      kFirstInvalidRegLocation + 1;

  /// A special location reserved for VarLocs with locations of kind
  /// WasmLocKind.
  /// TODO Placing all Wasm target index locations in this single kWasmLocation
  /// may cause slowdown in compilation time in very large functions. Consider
  /// giving each target index/offset pair its own u32_location_t if this
  /// becomes a problem.
  static constexpr u32_location_t kWasmLocation = kFirstInvalidRegLocation + 2;

  /// The first location that is reserved for VarLocs with locations of kind
  /// VirtualRegisterKind.
  static constexpr u32_location_t kFirstVirtualRegLocation = 1 << 31;

  LocIndex(u32_location_t Location, u32_index_t Index)
      : Location(Location), Index(Index) {}

  uint64_t getAsRawInteger() const {
    return (static_cast<uint64_t>(Location) << 32) | Index;
  }

  template<typename IntT> static LocIndex fromRawInteger(IntT ID) {
    static_assert(std::is_unsigned_v<IntT> && sizeof(ID) == sizeof(uint64_t),
                  "Cannot convert raw integer to LocIndex");
    return {static_cast<u32_location_t>(ID >> 32),
            static_cast<u32_index_t>(ID)};
  }

  /// Get the start of the interval reserved for VarLocs of kind RegisterKind
  /// which reside in \p Reg. The end is at rawIndexForReg(Reg+1)-1.
  static uint64_t rawIndexForReg(Register Reg) {
    return LocIndex(Reg, 0).getAsRawInteger();
  }

  /// Return a range covering all set indices in the interval reserved for
  /// \p Location in \p Set.
  static auto indexRangeForLocation(const VarLocSet &Set,
                                    u32_location_t Location) {
    uint64_t Start = LocIndex(Location, 0).getAsRawInteger();
    uint64_t End = LocIndex(Location + 1, 0).getAsRawInteger();
    return Set.half_open_range(Start, End);
  }
};
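
// Purely illustrative example (not used by the pass): a LocIndex packs its
// Location into the high 32 bits and its Index into the low 32 bits, so
// LocIndex(5, 7).getAsRawInteger() == (5ULL << 32) | 7, and fromRawInteger()
// on that value recovers {Location = 5, Index = 7}. This is what allows a
// VarLocSet to hold LocIndices as plain 64-bit integers, grouped contiguously
// by location.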

// Simple Set for storing all the VarLoc Indices at a Location bucket.
using VarLocsInRange = SmallSet<LocIndex::u32_index_t, 32>;
// Vector of all `LocIndex`s for a given VarLoc; the same Location should not
// appear in any two of these, as each VarLoc appears at most once in any
// Location bucket.
using LocIndices = SmallVector<LocIndex, 2>;

class VarLocBasedLDV : public LDVImpl {
private:
  const TargetRegisterInfo *TRI;
  const TargetInstrInfo *TII;
  const TargetFrameLowering *TFI;
  bool ShouldEmitDebugEntryValues;
  BitVector CalleeSavedRegs;
  LexicalScopes LS;
  VarLocSet::Allocator Alloc;

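  /// The most recent non-debug instruction seen while processing a block, if
  /// any; removeEntryValue() below consults it to recognise a parameter
  /// DBG_VALUE sitting at the very start of the entry block.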
  const MachineInstr *LastNonDbgMI;

  enum struct TransferKind { TransferCopy, TransferSpill, TransferRestore };

  using FragmentInfo = DIExpression::FragmentInfo;
  using OptFragmentInfo = std::optional<DIExpression::FragmentInfo>;

  /// A pair of debug variable and value location.
  struct VarLoc {
    // The location at which a spilled variable resides. It consists of a
    // register and an offset.
    struct SpillLoc {
      unsigned SpillBase;
      StackOffset SpillOffset;
      bool operator==(const SpillLoc &Other) const {
        return SpillBase == Other.SpillBase && SpillOffset == Other.SpillOffset;
      }
      bool operator!=(const SpillLoc &Other) const {
        return !(*this == Other);
      }
    };

    // Target indices used for wasm-specific locations.
    struct WasmLoc {
      // One of TargetIndex values defined in WebAssembly.h. We deal with
      // local-related TargetIndex in this analysis (TI_LOCAL and
      // TI_LOCAL_INDIRECT). Stack operands (TI_OPERAND_STACK) will be handled
      // separately by the WebAssemblyDebugFixup pass, and we don't associate
      // debug info with values in global operands (TI_GLOBAL_RELOC) at the
      // moment.
      int Index;
      int64_t Offset;
      bool operator==(const WasmLoc &Other) const {
        return Index == Other.Index && Offset == Other.Offset;
      }
      bool operator!=(const WasmLoc &Other) const { return !(*this == Other); }
    };

    /// Identity of the variable at this location.
    const DebugVariable Var;

    /// The expression applied to this location.
    const DIExpression *Expr;

    /// DBG_VALUE to clone var/expr information from if this location
    /// is moved.
    const MachineInstr &MI;

    enum class MachineLocKind {
      InvalidKind = 0,
      RegisterKind,
      SpillLocKind,
      ImmediateKind,
      WasmLocKind
    };

    enum class EntryValueLocKind {
      NonEntryValueKind = 0,
      EntryValueKind,
      EntryValueBackupKind,
      EntryValueCopyBackupKind
    } EVKind = EntryValueLocKind::NonEntryValueKind;

    /// The value location. Stored separately to avoid repeatedly
    /// extracting it from MI.
    union MachineLocValue {
      uint64_t RegNo;
      SpillLoc SpillLocation;
      uint64_t Hash;
      int64_t Immediate;
      const ConstantFP *FPImm;
      const ConstantInt *CImm;
      WasmLoc WasmLocation;
      MachineLocValue() : Hash(0) {}
    };

    /// A single machine location; its Kind is either a register, spill
    /// location, or immediate value.
    /// If the VarLoc is an entry value (i.e. its EVKind is not
    /// NonEntryValueKind), then it will use only a single MachineLoc of
    /// RegisterKind.
    struct MachineLoc {
      MachineLocKind Kind;
      MachineLocValue Value;
      bool operator==(const MachineLoc &Other) const {
        if (Kind != Other.Kind)
          return false;
        switch (Kind) {
        case MachineLocKind::SpillLocKind:
          return Value.SpillLocation == Other.Value.SpillLocation;
        case MachineLocKind::WasmLocKind:
          return Value.WasmLocation == Other.Value.WasmLocation;
        case MachineLocKind::RegisterKind:
        case MachineLocKind::ImmediateKind:
          return Value.Hash == Other.Value.Hash;
        default:
          llvm_unreachable("Invalid kind");
        }
      }
      bool operator<(const MachineLoc &Other) const {
        switch (Kind) {
        case MachineLocKind::SpillLocKind:
          return std::make_tuple(
                     Kind, Value.SpillLocation.SpillBase,
                     Value.SpillLocation.SpillOffset.getFixed(),
                     Value.SpillLocation.SpillOffset.getScalable()) <
                 std::make_tuple(
                     Other.Kind, Other.Value.SpillLocation.SpillBase,
                     Other.Value.SpillLocation.SpillOffset.getFixed(),
                     Other.Value.SpillLocation.SpillOffset.getScalable());
        case MachineLocKind::WasmLocKind:
          return std::make_tuple(Kind, Value.WasmLocation.Index,
                                 Value.WasmLocation.Offset) <
                 std::make_tuple(Other.Kind, Other.Value.WasmLocation.Index,
                                 Other.Value.WasmLocation.Offset);
        case MachineLocKind::RegisterKind:
        case MachineLocKind::ImmediateKind:
          return std::tie(Kind, Value.Hash) <
                 std::tie(Other.Kind, Other.Value.Hash);
        default:
          llvm_unreachable("Invalid kind");
        }
      }
    };

    /// The set of machine locations used to determine the variable's value, in
    /// conjunction with Expr. Initially populated with MI's debug operands,
    /// but may be transformed independently afterwards.
    SmallVector<MachineLoc, 8> Locs;
    /// Used to map the index of each location in Locs back to the index of its
    /// original debug operand in MI. Used when multiple location operands are
    /// coalesced and the original MI's operands need to be accessed while
    /// emitting a debug value.
    SmallVector<unsigned, 8> OrigLocMap;

    VarLoc(const MachineInstr &MI)
        : Var(MI.getDebugVariable(), MI.getDebugExpression(),
              MI.getDebugLoc()->getInlinedAt()),
          Expr(MI.getDebugExpression()), MI(MI) {
      assert(MI.isDebugValue() && "not a DBG_VALUE");
      assert((MI.isDebugValueList() || MI.getNumOperands() == 4) &&
             "malformed DBG_VALUE");
      for (const MachineOperand &Op : MI.debug_operands()) {
        MachineLoc ML = GetLocForOp(Op);
        auto It = find(Locs, ML);
        if (It == Locs.end()) {
          Locs.push_back(ML);
          OrigLocMap.push_back(MI.getDebugOperandIndex(&Op));
        } else {
          // ML duplicates an element in Locs; replace references to Op
          // with references to the duplicating element.
          unsigned OpIdx = Locs.size();
          unsigned DuplicatingIdx = std::distance(Locs.begin(), It);
          Expr = DIExpression::replaceArg(Expr, OpIdx, DuplicatingIdx);
        }
      }

      // We create the debug entry values from the factory functions rather
      // than from this ctor.
      assert(EVKind != EntryValueLocKind::EntryValueKind &&
             !isEntryBackupLoc());
    }

    static MachineLoc GetLocForOp(const MachineOperand &Op) {
      MachineLocKind Kind;
      MachineLocValue Loc;
      if (Op.isReg()) {
        Kind = MachineLocKind::RegisterKind;
        Loc.RegNo = Op.getReg();
      } else if (Op.isImm()) {
        Kind = MachineLocKind::ImmediateKind;
        Loc.Immediate = Op.getImm();
      } else if (Op.isFPImm()) {
        Kind = MachineLocKind::ImmediateKind;
        Loc.FPImm = Op.getFPImm();
      } else if (Op.isCImm()) {
        Kind = MachineLocKind::ImmediateKind;
        Loc.CImm = Op.getCImm();
      } else if (Op.isTargetIndex()) {
        Kind = MachineLocKind::WasmLocKind;
        Loc.WasmLocation = {Op.getIndex(), Op.getOffset()};
      } else
        llvm_unreachable("Invalid Op kind for MachineLoc.");
      return {Kind, Loc};
    }

    /// Take the variable and machine-location in DBG_VALUE MI, and build an
    /// entry location using the given expression.
    static VarLoc CreateEntryLoc(const MachineInstr &MI,
                                 const DIExpression *EntryExpr, Register Reg) {
      VarLoc VL(MI);
      assert(VL.Locs.size() == 1 &&
             VL.Locs[0].Kind == MachineLocKind::RegisterKind);
      VL.EVKind = EntryValueLocKind::EntryValueKind;
      VL.Expr = EntryExpr;
      VL.Locs[0].Value.RegNo = Reg;
      return VL;
    }

    /// Take the variable and machine-location from the DBG_VALUE (from the
    /// function entry), and build an entry value backup location. The backup
    /// location will turn into the normal location if the backup is valid at
    /// the time of the primary location clobbering.
    static VarLoc CreateEntryBackupLoc(const MachineInstr &MI,
                                       const DIExpression *EntryExpr) {
      VarLoc VL(MI);
      assert(VL.Locs.size() == 1 &&
             VL.Locs[0].Kind == MachineLocKind::RegisterKind);
      VL.EVKind = EntryValueLocKind::EntryValueBackupKind;
      VL.Expr = EntryExpr;
      return VL;
    }

    /// Take the variable and machine-location from the DBG_VALUE (from the
    /// function entry), and build a copy of an entry value backup location by
    /// setting the register location to NewReg.
    static VarLoc CreateEntryCopyBackupLoc(const MachineInstr &MI,
                                           const DIExpression *EntryExpr,
                                           Register NewReg) {
      VarLoc VL(MI);
      assert(VL.Locs.size() == 1 &&
             VL.Locs[0].Kind == MachineLocKind::RegisterKind);
      VL.EVKind = EntryValueLocKind::EntryValueCopyBackupKind;
      VL.Expr = EntryExpr;
      VL.Locs[0].Value.RegNo = NewReg;
      return VL;
    }

    /// Copy the register location in DBG_VALUE MI, updating the register to
    /// be NewReg.
    static VarLoc CreateCopyLoc(const VarLoc &OldVL, const MachineLoc &OldML,
                                Register NewReg) {
      VarLoc VL = OldVL;
      for (MachineLoc &ML : VL.Locs)
        if (ML == OldML) {
          ML.Kind = MachineLocKind::RegisterKind;
          ML.Value.RegNo = NewReg;
          return VL;
        }
      llvm_unreachable("Should have found OldML in new VarLoc.");
    }

    /// Take the variable described by DBG_VALUE* MI, and create a VarLoc
    /// locating it in the specified spill location.
    static VarLoc CreateSpillLoc(const VarLoc &OldVL, const MachineLoc &OldML,
                                 unsigned SpillBase, StackOffset SpillOffset) {
      VarLoc VL = OldVL;
      for (MachineLoc &ML : VL.Locs)
        if (ML == OldML) {
          ML.Kind = MachineLocKind::SpillLocKind;
          ML.Value.SpillLocation = {SpillBase, SpillOffset};
          return VL;
        }
      llvm_unreachable("Should have found OldML in new VarLoc.");
    }

    /// Create a DBG_VALUE representing this VarLoc in the given function.
    /// Copies variable-specific information such as DILocalVariable and
    /// inlining information from the original DBG_VALUE instruction, which may
    /// have been several transfers ago.
    MachineInstr *BuildDbgValue(MachineFunction &MF) const {
      assert(!isEntryBackupLoc() &&
             "Tried to produce DBG_VALUE for backup VarLoc");
      const DebugLoc &DbgLoc = MI.getDebugLoc();
      bool Indirect = MI.isIndirectDebugValue();
      const auto &IID = MI.getDesc();
      const DILocalVariable *Var = MI.getDebugVariable();
      NumInserted++;

      const DIExpression *DIExpr = Expr;
      SmallVector<MachineOperand, 8> MOs;
      for (unsigned I = 0, E = Locs.size(); I < E; ++I) {
        MachineLocKind LocKind = Locs[I].Kind;
        MachineLocValue Loc = Locs[I].Value;
        const MachineOperand &Orig = MI.getDebugOperand(OrigLocMap[I]);
        switch (LocKind) {
        case MachineLocKind::RegisterKind:
          // An entry value is a register location -- but with an updated
          // expression. The register location of such DBG_VALUE is always the
          // one from the entry DBG_VALUE; it does not matter if the entry value
          // was copied into another register due to some optimizations.
          // Non-entry value register locations are like the source
          // DBG_VALUE, but with the register number from this VarLoc.
          MOs.push_back(MachineOperand::CreateReg(
              EVKind == EntryValueLocKind::EntryValueKind ? Orig.getReg()
                                                          : Register(Loc.RegNo),
              false));
          break;
        case MachineLocKind::SpillLocKind: {
          // Spills are indirect DBG_VALUEs, with a base register and offset.
          // Use the original DBG_VALUE's expression to build the spill
          // location on top of. FIXME: spill locations created before this
          // pass runs are not recognized, and not handled here.
          unsigned Base = Loc.SpillLocation.SpillBase;
          auto *TRI = MF.getSubtarget().getRegisterInfo();
          if (MI.isNonListDebugValue()) {
            auto Deref = Indirect ? DIExpression::DerefAfter : 0;
            DIExpr = TRI->prependOffsetExpression(
                DIExpr, DIExpression::ApplyOffset | Deref,
                Loc.SpillLocation.SpillOffset);
            Indirect = true;
          } else {
            SmallVector<uint64_t, 4> Ops;
            TRI->getOffsetOpcodes(Loc.SpillLocation.SpillOffset, Ops);
            Ops.push_back(dwarf::DW_OP_deref);
            DIExpr = DIExpression::appendOpsToArg(DIExpr, Ops, I);
          }
          MOs.push_back(MachineOperand::CreateReg(Base, false));
          break;
        }
        case MachineLocKind::ImmediateKind: {
          MOs.push_back(Orig);
          break;
        }
        case MachineLocKind::WasmLocKind: {
          MOs.push_back(Orig);
          break;
        }
        case MachineLocKind::InvalidKind:
          llvm_unreachable("Tried to produce DBG_VALUE for invalid VarLoc");
        }
      }
      return BuildMI(MF, DbgLoc, IID, Indirect, MOs, Var, DIExpr);
    }

    /// Is the Loc field a constant or constant object?
    bool isConstant(MachineLocKind Kind) const {
      return Kind == MachineLocKind::ImmediateKind;
    }

    /// Check if the Loc field is an entry backup location.
    bool isEntryBackupLoc() const {
      return EVKind == EntryValueLocKind::EntryValueBackupKind ||
             EVKind == EntryValueLocKind::EntryValueCopyBackupKind;
    }

    /// If this variable is described by register \p Reg holding the entry
    /// value, return true.
    bool isEntryValueBackupReg(Register Reg) const {
      return EVKind == EntryValueLocKind::EntryValueBackupKind && usesReg(Reg);
    }

    /// If this variable is described by register \p Reg holding a copy of the
    /// entry value, return true.
    bool isEntryValueCopyBackupReg(Register Reg) const {
      return EVKind == EntryValueLocKind::EntryValueCopyBackupKind &&
             usesReg(Reg);
    }

    /// If this variable is described in whole or part by \p Reg, return true.
    bool usesReg(Register Reg) const {
      MachineLoc RegML;
      RegML.Kind = MachineLocKind::RegisterKind;
      RegML.Value.RegNo = Reg;
      return is_contained(Locs, RegML);
    }

    /// If this variable is described in whole or part by \p Reg, return the
    /// index of the machine location in Locs that uses \p Reg.
    unsigned getRegIdx(Register Reg) const {
      for (unsigned Idx = 0; Idx < Locs.size(); ++Idx)
        if (Locs[Idx].Kind == MachineLocKind::RegisterKind &&
            Register{static_cast<unsigned>(Locs[Idx].Value.RegNo)} == Reg)
          return Idx;
      llvm_unreachable("Could not find given Reg in Locs");
    }

    /// If this variable is described in whole or part by 1 or more registers,
    /// add each of them to \p Regs and return true.
    bool getDescribingRegs(SmallVectorImpl<uint32_t> &Regs) const {
      bool AnyRegs = false;
      for (const auto &Loc : Locs)
        if (Loc.Kind == MachineLocKind::RegisterKind) {
          Regs.push_back(Loc.Value.RegNo);
          AnyRegs = true;
        }
      return AnyRegs;
    }

    bool containsSpillLocs() const {
      return any_of(Locs, [](VarLoc::MachineLoc ML) {
        return ML.Kind == VarLoc::MachineLocKind::SpillLocKind;
      });
    }

    /// If this variable is described in whole or part by \p SpillLocation,
    /// return true.
    bool usesSpillLoc(SpillLoc SpillLocation) const {
      MachineLoc SpillML;
      SpillML.Kind = MachineLocKind::SpillLocKind;
      SpillML.Value.SpillLocation = SpillLocation;
      return is_contained(Locs, SpillML);
    }

    /// If this variable is described in whole or part by \p SpillLocation,
    /// return the index of the machine location in Locs that uses it.
    unsigned getSpillLocIdx(SpillLoc SpillLocation) const {
      for (unsigned Idx = 0; Idx < Locs.size(); ++Idx)
        if (Locs[Idx].Kind == MachineLocKind::SpillLocKind &&
            Locs[Idx].Value.SpillLocation == SpillLocation)
          return Idx;
      llvm_unreachable("Could not find given SpillLoc in Locs");
    }

    bool containsWasmLocs() const {
      return any_of(Locs, [](VarLoc::MachineLoc ML) {
        return ML.Kind == VarLoc::MachineLocKind::WasmLocKind;
      });
    }

    /// If this variable is described in whole or part by \p WasmLocation,
    /// return true.
    bool usesWasmLoc(WasmLoc WasmLocation) const {
      MachineLoc WasmML;
      WasmML.Kind = MachineLocKind::WasmLocKind;
      WasmML.Value.WasmLocation = WasmLocation;
      return is_contained(Locs, WasmML);
    }

    /// Determine whether the lexical scope of this value's debug location
    /// dominates MBB.
    bool dominates(LexicalScopes &LS, MachineBasicBlock &MBB) const {
      return LS.dominates(MI.getDebugLoc().get(), &MBB);
    }

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
    // TRI and TII can be null.
    void dump(const TargetRegisterInfo *TRI, const TargetInstrInfo *TII,
              raw_ostream &Out = dbgs()) const {
      Out << "VarLoc(";
      for (const MachineLoc &MLoc : Locs) {
        if (Locs.begin() != &MLoc)
          Out << ", ";
        switch (MLoc.Kind) {
        case MachineLocKind::RegisterKind:
          Out << printReg(MLoc.Value.RegNo, TRI);
          break;
        case MachineLocKind::SpillLocKind:
          Out << printReg(MLoc.Value.SpillLocation.SpillBase, TRI);
          Out << "[" << MLoc.Value.SpillLocation.SpillOffset.getFixed() << " + "
              << MLoc.Value.SpillLocation.SpillOffset.getScalable()
              << "x vscale"
              << "]";
          break;
        case MachineLocKind::ImmediateKind:
          Out << MLoc.Value.Immediate;
          break;
        case MachineLocKind::WasmLocKind: {
          if (TII) {
            auto Indices = TII->getSerializableTargetIndices();
            auto Found =
                find_if(Indices, [&](const std::pair<int, const char *> &I) {
                  return I.first == MLoc.Value.WasmLocation.Index;
                });
            assert(Found != Indices.end());
            Out << Found->second;
            if (MLoc.Value.WasmLocation.Offset > 0)
              Out << " + " << MLoc.Value.WasmLocation.Offset;
          } else {
            Out << "WasmLoc";
          }
          break;
        }
        case MachineLocKind::InvalidKind:
          llvm_unreachable("Invalid VarLoc in dump method");
        }
      }

      Out << ", \"" << Var.getVariable()->getName() << "\", " << *Expr << ", ";
      if (Var.getInlinedAt())
        Out << "!" << Var.getInlinedAt()->getMetadataID() << ")\n";
      else
        Out << "(null))";

      if (isEntryBackupLoc())
        Out << " (backup loc)\n";
      else
        Out << "\n";
    }
#endif

    bool operator==(const VarLoc &Other) const {
      return std::tie(EVKind, Var, Expr, Locs) ==
             std::tie(Other.EVKind, Other.Var, Other.Expr, Other.Locs);
    }

    /// This operator guarantees that VarLocs are sorted by Variable first.
    bool operator<(const VarLoc &Other) const {
      return std::tie(Var, EVKind, Locs, Expr) <
             std::tie(Other.Var, Other.EVKind, Other.Locs, Other.Expr);
    }
  };

#ifndef NDEBUG
  using VarVec = SmallVector<VarLoc, 32>;
#endif

  /// VarLocMap is used for two things:
  /// 1) Assigning LocIndices to a VarLoc. The LocIndices can be used to
  ///    virtually insert a VarLoc into a VarLocSet.
  /// 2) Given a LocIndex, look up the unique associated VarLoc.
  class VarLocMap {
    /// Map a VarLoc to an index within the vector reserved for its location
    /// within Loc2Vars.
    std::map<VarLoc, LocIndices> Var2Indices;

    /// Map a location to a vector which holds VarLocs which live in that
    /// location.
    SmallDenseMap<LocIndex::u32_location_t, std::vector<VarLoc>> Loc2Vars;

  public:
    /// Retrieve LocIndices for \p VL.
    LocIndices insert(const VarLoc &VL) {
      LocIndices &Indices = Var2Indices[VL];
      // If Indices is not empty, VL is already in the map.
      if (!Indices.empty())
        return Indices;
      SmallVector<LocIndex::u32_location_t, 4> Locations;
      // LocIndices are determined by EVKind and MLs; each Register has a
      // unique location, while all SpillLocs use a single bucket, and any EV
      // VarLocs use only the Backup bucket or none at all (except the
      // compulsory entry at the universal location index). LocIndices will
      // always have an index at the universal location index as the last
      // index.
      if (VL.EVKind == VarLoc::EntryValueLocKind::NonEntryValueKind) {
        VL.getDescribingRegs(Locations);
        assert(all_of(Locations,
                      [](auto RegNo) {
                        return (RegNo < LocIndex::kFirstInvalidRegLocation) ||
                               (LocIndex::kFirstVirtualRegLocation <= RegNo);
                      }) &&
               "Physical or virtual register out of range?");
        if (VL.containsSpillLocs())
          Locations.push_back(LocIndex::kSpillLocation);
        if (VL.containsWasmLocs())
          Locations.push_back(LocIndex::kWasmLocation);
      } else if (VL.EVKind != VarLoc::EntryValueLocKind::EntryValueKind) {
        LocIndex::u32_location_t Loc = LocIndex::kEntryValueBackupLocation;
        Locations.push_back(Loc);
      }
      Locations.push_back(LocIndex::kUniversalLocation);
      for (LocIndex::u32_location_t Location : Locations) {
        auto &Vars = Loc2Vars[Location];
        Indices.push_back(
            {Location, static_cast<LocIndex::u32_index_t>(Vars.size())});
        Vars.push_back(VL);
      }
      return Indices;
    }

    LocIndices getAllIndices(const VarLoc &VL) const {
      auto IndIt = Var2Indices.find(VL);
      assert(IndIt != Var2Indices.end() && "VarLoc not tracked");
      return IndIt->second;
    }

    /// Retrieve the unique VarLoc associated with \p ID.
    const VarLoc &operator[](LocIndex ID) const {
      auto LocIt = Loc2Vars.find(ID.Location);
      assert(LocIt != Loc2Vars.end() && "Location not tracked");
      return LocIt->second[ID.Index];
    }
  };
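
  // Brief usage sketch (for exposition only; the real call sites are in the
  // transfer functions below): inserting a VarLoc hands back one LocIndex per
  // location bucket it occupies, and any of those indices can be used to look
  // the VarLoc up again.
  //
  //   VarLocMap Map;
  //   LocIndices Idxs = Map.insert(VL);       // e.g. {register bucket, universal}
  //   const VarLoc &Same = Map[Idxs.back()];  // universal index -> same VarLoc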

  using VarLocInMBB =
      SmallDenseMap<const MachineBasicBlock *, std::unique_ptr<VarLocSet>>;
  struct TransferDebugPair {
    MachineInstr *TransferInst; ///< Instruction where this transfer occurs.
    LocIndex LocationID;        ///< Location number for the transfer dest.
  };
  using TransferMap = SmallVector<TransferDebugPair, 4>;
  // Types for recording Entry Var Locations emitted by a single MachineInstr,
  // as well as recording MachineInstr which last defined a register.
  using InstToEntryLocMap = std::multimap<const MachineInstr *, LocIndex>;
  using RegDefToInstMap = DenseMap<Register, MachineInstr *>;

  // Types for recording sets of variable fragments that overlap. For a given
  // local variable, we record all other fragments of that variable that could
  // overlap it, to reduce search time.
  using FragmentOfVar =
      std::pair<const DILocalVariable *, DIExpression::FragmentInfo>;
  using OverlapMap =
      DenseMap<FragmentOfVar, SmallVector<DIExpression::FragmentInfo, 1>>;

  // Helper while building OverlapMap, a map of all fragments seen for a given
  // DILocalVariable.
  using VarToFragments =
      DenseMap<const DILocalVariable *, SmallSet<FragmentInfo, 4>>;

  /// Collects all VarLocs from \p CollectFrom. Each unique VarLoc is added
  /// to \p Collected once, in order of insertion into \p VarLocIDs.
  static void collectAllVarLocs(SmallVectorImpl<VarLoc> &Collected,
                                const VarLocSet &CollectFrom,
                                const VarLocMap &VarLocIDs);

  /// Get the registers which are used by VarLocs of kind RegisterKind tracked
  /// by \p CollectFrom.
  void getUsedRegs(const VarLocSet &CollectFrom,
                   SmallVectorImpl<Register> &UsedRegs) const;

  /// This holds the working set of currently open ranges. For fast
  /// access, this is done both as a set of VarLocIDs, and a map of
  /// DebugVariable to recent VarLocID. Note that a DBG_VALUE ends all
  /// previous open ranges for the same variable. In addition, we keep
  /// two different maps (Vars/EntryValuesBackupVars), so erase/insert
  /// methods act differently depending on whether a VarLoc is a primary
  /// location or a backup one. In the case that the VarLoc is a backup
  /// location, we will erase/insert from the EntryValuesBackupVars map,
  /// otherwise we perform the operation on Vars.
  class OpenRangesSet {
    VarLocSet::Allocator &Alloc;
    VarLocSet VarLocs;
    // Map the DebugVariable to recent primary location ID.
    SmallDenseMap<DebugVariable, LocIndices, 8> Vars;
    // Map the DebugVariable to recent backup location ID.
    SmallDenseMap<DebugVariable, LocIndices, 8> EntryValuesBackupVars;
    OverlapMap &OverlappingFragments;

  public:
    OpenRangesSet(VarLocSet::Allocator &Alloc, OverlapMap &_OLapMap)
        : Alloc(Alloc), VarLocs(Alloc), OverlappingFragments(_OLapMap) {}

    const VarLocSet &getVarLocs() const { return VarLocs; }

    // Fetches all VarLocs in \p VarLocIDs and inserts them into \p Collected.
    // This method is needed to get every VarLoc once, as each VarLoc may have
    // multiple indices in a VarLocMap (corresponding to each applicable
    // location), but all VarLocs appear exactly once at the universal location
    // index.
    void getUniqueVarLocs(SmallVectorImpl<VarLoc> &Collected,
                          const VarLocMap &VarLocIDs) const {
      collectAllVarLocs(Collected, VarLocs, VarLocIDs);
    }

    /// Terminate all open ranges for VL.Var by removing it from the set.
    void erase(const VarLoc &VL);

    /// Terminate all open ranges listed as indices in \c KillSet with
    /// \c Location by removing them from the set.
    void erase(const VarLocsInRange &KillSet, const VarLocMap &VarLocIDs,
               LocIndex::u32_location_t Location);

    /// Insert a new range into the set.
    void insert(LocIndices VarLocIDs, const VarLoc &VL);

    /// Insert a set of ranges.
    void insertFromLocSet(const VarLocSet &ToLoad, const VarLocMap &Map);

    std::optional<LocIndices> getEntryValueBackup(DebugVariable Var);

    /// Empty the set.
    void clear() {
      VarLocs.clear();
      Vars.clear();
      EntryValuesBackupVars.clear();
    }

    /// Return whether the set is empty or not.
    bool empty() const {
      assert(Vars.empty() == EntryValuesBackupVars.empty() &&
             Vars.empty() == VarLocs.empty() &&
             "open ranges are inconsistent");
      return VarLocs.empty();
    }

    /// Get an empty range of VarLoc IDs.
    auto getEmptyVarLocRange() const {
      return iterator_range<VarLocSet::const_iterator>(getVarLocs().end(),
                                                       getVarLocs().end());
    }

    /// Get all set IDs for VarLocs with MLs of kind RegisterKind in \p Reg.
    auto getRegisterVarLocs(Register Reg) const {
      return LocIndex::indexRangeForLocation(getVarLocs(), Reg);
    }

    /// Get all set IDs for VarLocs with MLs of kind SpillLocKind.
    auto getSpillVarLocs() const {
      return LocIndex::indexRangeForLocation(getVarLocs(),
                                             LocIndex::kSpillLocation);
    }

    /// Get all set IDs for VarLocs of EVKind EntryValueBackupKind or
    /// EntryValueCopyBackupKind.
    auto getEntryValueBackupVarLocs() const {
      return LocIndex::indexRangeForLocation(
          getVarLocs(), LocIndex::kEntryValueBackupLocation);
    }

    /// Get all set IDs for VarLocs with MLs of kind WasmLocKind.
    auto getWasmVarLocs() const {
      return LocIndex::indexRangeForLocation(getVarLocs(),
                                             LocIndex::kWasmLocation);
    }
  };
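
  // For exposition: the typical lifecycle of an open range, as used by
  // transferDebugValue() below, is to end any previous range for the variable
  // and then open the new one under the IDs handed out by the VarLocMap:
  //
  //   OpenRanges.erase(VL);                 // end prior ranges of VL.Var
  //   LocIndices IDs = VarLocIDs.insert(VL);
  //   OpenRanges.insert(IDs, VL);           // open the new range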

  /// Collect all VarLoc IDs from \p CollectFrom for VarLocs with MLs of kind
  /// RegisterKind which are located in any reg in \p Regs. The IDs for each
  /// VarLoc correspond to entries in the universal location bucket, which every
  /// VarLoc has exactly 1 entry for. Insert collected IDs into \p Collected.
  static void collectIDsForRegs(VarLocsInRange &Collected,
                                const DefinedRegsSet &Regs,
                                const VarLocSet &CollectFrom,
                                const VarLocMap &VarLocIDs);

  VarLocSet &getVarLocsInMBB(const MachineBasicBlock *MBB, VarLocInMBB &Locs) {
    std::unique_ptr<VarLocSet> &VLS = Locs[MBB];
    if (!VLS)
      VLS = std::make_unique<VarLocSet>(Alloc);
    return *VLS;
  }

  const VarLocSet &getVarLocsInMBB(const MachineBasicBlock *MBB,
                                   const VarLocInMBB &Locs) const {
    auto It = Locs.find(MBB);
    assert(It != Locs.end() && "MBB not in map");
    return *It->second;
  }

  /// Tests whether this instruction is a spill to a stack location.
  bool isSpillInstruction(const MachineInstr &MI, MachineFunction *MF);

  /// Decide if @MI is a spill instruction and return true if it is. We use 2
  /// criteria to make this decision:
  /// - Is this instruction a store to a spill slot?
  /// - Is there a register operand that is both used and killed?
  /// TODO: Store optimization can fold spills into other stores (including
  /// other spills). We do not handle this yet (more than one memory operand).
  bool isLocationSpill(const MachineInstr &MI, MachineFunction *MF,
                       Register &Reg);

  /// Returns true if the given machine instruction is a debug value which we
  /// can emit entry values for.
  ///
  /// Currently, we generate debug entry values only for parameters that are
  /// unmodified throughout the function and located in a register.
  bool isEntryValueCandidate(const MachineInstr &MI,
                             const DefinedRegsSet &Regs) const;

  /// If a given instruction is identified as a restore, return the spill
  /// location it restores from and set \p Reg to the restored register.
  std::optional<VarLoc::SpillLoc> isRestoreInstruction(const MachineInstr &MI,
                                                       MachineFunction *MF,
                                                       Register &Reg);
  /// Given a spill instruction, extract the register and offset used to
  /// address the spill location in a target independent way.
  VarLoc::SpillLoc extractSpillBaseRegAndOffset(const MachineInstr &MI);
  void insertTransferDebugPair(MachineInstr &MI, OpenRangesSet &OpenRanges,
                               TransferMap &Transfers, VarLocMap &VarLocIDs,
                               LocIndex OldVarID, TransferKind Kind,
                               const VarLoc::MachineLoc &OldLoc,
                               Register NewReg = Register());

  void transferDebugValue(const MachineInstr &MI, OpenRangesSet &OpenRanges,
                          VarLocMap &VarLocIDs,
                          InstToEntryLocMap &EntryValTransfers,
                          RegDefToInstMap &RegSetInstrs);
  void transferSpillOrRestoreInst(MachineInstr &MI, OpenRangesSet &OpenRanges,
                                  VarLocMap &VarLocIDs, TransferMap &Transfers);
  void cleanupEntryValueTransfers(const MachineInstr *MI,
                                  OpenRangesSet &OpenRanges,
                                  VarLocMap &VarLocIDs, const VarLoc &EntryVL,
                                  InstToEntryLocMap &EntryValTransfers);
  void removeEntryValue(const MachineInstr &MI, OpenRangesSet &OpenRanges,
                        VarLocMap &VarLocIDs, const VarLoc &EntryVL,
                        InstToEntryLocMap &EntryValTransfers,
                        RegDefToInstMap &RegSetInstrs);
  void emitEntryValues(MachineInstr &MI, OpenRangesSet &OpenRanges,
                       VarLocMap &VarLocIDs,
                       InstToEntryLocMap &EntryValTransfers,
                       VarLocsInRange &KillSet);
  void recordEntryValue(const MachineInstr &MI,
                        const DefinedRegsSet &DefinedRegs,
                        OpenRangesSet &OpenRanges, VarLocMap &VarLocIDs);
  void transferRegisterCopy(MachineInstr &MI, OpenRangesSet &OpenRanges,
                            VarLocMap &VarLocIDs, TransferMap &Transfers);
  void transferRegisterDef(MachineInstr &MI, OpenRangesSet &OpenRanges,
                           VarLocMap &VarLocIDs,
                           InstToEntryLocMap &EntryValTransfers,
                           RegDefToInstMap &RegSetInstrs);
  void transferWasmDef(MachineInstr &MI, OpenRangesSet &OpenRanges,
                       VarLocMap &VarLocIDs);
  bool transferTerminator(MachineBasicBlock *MBB, OpenRangesSet &OpenRanges,
                          VarLocInMBB &OutLocs, const VarLocMap &VarLocIDs);

  void process(MachineInstr &MI, OpenRangesSet &OpenRanges,
               VarLocMap &VarLocIDs, TransferMap &Transfers,
               InstToEntryLocMap &EntryValTransfers,
               RegDefToInstMap &RegSetInstrs);

  void accumulateFragmentMap(MachineInstr &MI, VarToFragments &SeenFragments,
                             OverlapMap &OLapMap);

  bool join(MachineBasicBlock &MBB, VarLocInMBB &OutLocs, VarLocInMBB &InLocs,
            const VarLocMap &VarLocIDs,
            SmallPtrSet<const MachineBasicBlock *, 16> &Visited,
            SmallPtrSetImpl<const MachineBasicBlock *> &ArtificialBlocks);

  /// Create DBG_VALUE insts for inlocs that have been propagated but
  /// had their instruction creation deferred.
  void flushPendingLocs(VarLocInMBB &PendingInLocs, VarLocMap &VarLocIDs);

  bool ExtendRanges(MachineFunction &MF, MachineDominatorTree *DomTree,
                    bool ShouldEmitDebugEntryValues, unsigned InputBBLimit,
                    unsigned InputDbgValLimit) override;

public:
  /// Default construct and initialize the pass.
  VarLocBasedLDV();

  ~VarLocBasedLDV();

  /// Print to ostream with a message.
  void printVarLocInMBB(const MachineFunction &MF, const VarLocInMBB &V,
                        const VarLocMap &VarLocIDs, const char *msg,
                        raw_ostream &Out) const;
};

} // end anonymous namespace

//===----------------------------------------------------------------------===//
//            Implementation
//===----------------------------------------------------------------------===//

VarLocBasedLDV::VarLocBasedLDV() = default;

VarLocBasedLDV::~VarLocBasedLDV() = default;

/// Erase a variable from the set of open ranges, and additionally erase any
/// fragments that may overlap it. If the VarLoc is a backup location, erase
/// the variable from the EntryValuesBackupVars set, indicating we should stop
/// tracking its backup entry location. Otherwise, if the VarLoc is a primary
/// location, erase the variable from the Vars set.
void VarLocBasedLDV::OpenRangesSet::erase(const VarLoc &VL) {
  // Erasure helper.
  auto DoErase = [&VL, this](DebugVariable VarToErase) {
    auto *EraseFrom = VL.isEntryBackupLoc() ? &EntryValuesBackupVars : &Vars;
    auto It = EraseFrom->find(VarToErase);
    if (It != EraseFrom->end()) {
      LocIndices IDs = It->second;
      for (LocIndex ID : IDs)
        VarLocs.reset(ID.getAsRawInteger());
      EraseFrom->erase(It);
    }
  };

  DebugVariable Var = VL.Var;

  // Erase the variable/fragment that ends here.
  DoErase(Var);

  // Extract the fragment. Interpret an empty fragment as one that covers all
  // possible bits.
  FragmentInfo ThisFragment = Var.getFragmentOrDefault();

  // There may be fragments that overlap the designated fragment. Look them up
  // in the pre-computed overlap map, and erase them too.
  auto MapIt = OverlappingFragments.find({Var.getVariable(), ThisFragment});
  if (MapIt != OverlappingFragments.end()) {
    for (auto Fragment : MapIt->second) {
      VarLocBasedLDV::OptFragmentInfo FragmentHolder;
      if (!DebugVariable::isDefaultFragment(Fragment))
        FragmentHolder = VarLocBasedLDV::OptFragmentInfo(Fragment);
      DoErase({Var.getVariable(), FragmentHolder, Var.getInlinedAt()});
    }
  }
}

void VarLocBasedLDV::OpenRangesSet::erase(const VarLocsInRange &KillSet,
                                          const VarLocMap &VarLocIDs,
                                          LocIndex::u32_location_t Location) {
  VarLocSet RemoveSet(Alloc);
  for (LocIndex::u32_index_t ID : KillSet) {
    const VarLoc &VL = VarLocIDs[LocIndex(Location, ID)];
    auto *EraseFrom = VL.isEntryBackupLoc() ? &EntryValuesBackupVars : &Vars;
    EraseFrom->erase(VL.Var);
    LocIndices VLI = VarLocIDs.getAllIndices(VL);
    for (LocIndex ID : VLI)
      RemoveSet.set(ID.getAsRawInteger());
  }
  VarLocs.intersectWithComplement(RemoveSet);
}

void VarLocBasedLDV::OpenRangesSet::insertFromLocSet(const VarLocSet &ToLoad,
                                                     const VarLocMap &Map) {
  VarLocsInRange UniqueVarLocIDs;
  DefinedRegsSet Regs;
  Regs.insert(LocIndex::kUniversalLocation);
  collectIDsForRegs(UniqueVarLocIDs, Regs, ToLoad, Map);
  for (uint64_t ID : UniqueVarLocIDs) {
    LocIndex Idx = LocIndex::fromRawInteger(ID);
    const VarLoc &VarL = Map[Idx];
    const LocIndices Indices = Map.getAllIndices(VarL);
    insert(Indices, VarL);
  }
}

void VarLocBasedLDV::OpenRangesSet::insert(LocIndices VarLocIDs,
                                           const VarLoc &VL) {
  auto *InsertInto = VL.isEntryBackupLoc() ? &EntryValuesBackupVars : &Vars;
  for (LocIndex ID : VarLocIDs)
    VarLocs.set(ID.getAsRawInteger());
  InsertInto->insert({VL.Var, VarLocIDs});
}

/// Return the Loc ID of an entry value backup location, if it exists for the
/// variable.
std::optional<LocIndices>
VarLocBasedLDV::OpenRangesSet::getEntryValueBackup(DebugVariable Var) {
  auto It = EntryValuesBackupVars.find(Var);
  if (It != EntryValuesBackupVars.end())
    return It->second;

  return std::nullopt;
}

void VarLocBasedLDV::collectIDsForRegs(VarLocsInRange &Collected,
                                       const DefinedRegsSet &Regs,
                                       const VarLocSet &CollectFrom,
                                       const VarLocMap &VarLocIDs) {
  assert(!Regs.empty() && "Nothing to collect");
  SmallVector<Register, 32> SortedRegs;
  append_range(SortedRegs, Regs);
  array_pod_sort(SortedRegs.begin(), SortedRegs.end());
  auto It = CollectFrom.find(LocIndex::rawIndexForReg(SortedRegs.front()));
  auto End = CollectFrom.end();
  for (Register Reg : SortedRegs) {
    // The half-open interval [FirstIndexForReg, FirstInvalidIndex) contains
    // all possible VarLoc IDs for VarLocs with MLs of kind RegisterKind which
    // live in Reg.
    uint64_t FirstIndexForReg = LocIndex::rawIndexForReg(Reg);
    uint64_t FirstInvalidIndex = LocIndex::rawIndexForReg(Reg + 1);
    It.advanceToLowerBound(FirstIndexForReg);

    // Iterate through that half-open interval and collect all the set IDs.
    for (; It != End && *It < FirstInvalidIndex; ++It) {
      LocIndex ItIdx = LocIndex::fromRawInteger(*It);
      const VarLoc &VL = VarLocIDs[ItIdx];
      LocIndices LI = VarLocIDs.getAllIndices(VL);
      // For now, the back index is always the universal location index.
      assert(LI.back().Location == LocIndex::kUniversalLocation &&
             "Unexpected order of LocIndices for VarLoc; was it inserted into "
             "the VarLocMap correctly?");
      Collected.insert(LI.back().Index);
    }

    if (It == End)
      return;
  }
}

void VarLocBasedLDV::getUsedRegs(const VarLocSet &CollectFrom,
                                 SmallVectorImpl<Register> &UsedRegs) const {
  // All register-based VarLocs are assigned indices greater than or equal to
  // FirstRegIndex.
  uint64_t FirstRegIndex =
      LocIndex::rawIndexForReg(LocIndex::kFirstRegLocation);
  uint64_t FirstInvalidIndex =
      LocIndex::rawIndexForReg(LocIndex::kFirstInvalidRegLocation);
  uint64_t FirstVirtualRegIndex =
      LocIndex::rawIndexForReg(LocIndex::kFirstVirtualRegLocation);
  auto doGetUsedRegs = [&](VarLocSet::const_iterator &It) {
    // We found a VarLoc ID for a VarLoc that lives in a register. Figure out
    // which register and add it to UsedRegs.
    uint32_t FoundReg = LocIndex::fromRawInteger(*It).Location;
    assert((UsedRegs.empty() || FoundReg != UsedRegs.back()) &&
           "Duplicate used reg");
    UsedRegs.push_back(FoundReg);

    // Skip to the next /set/ register. Note that this finds a lower bound, so
    // even if there aren't any VarLocs living in `FoundReg+1`, we're still
    // guaranteed to move on to the next register (or to end()).
    uint64_t NextRegIndex = LocIndex::rawIndexForReg(FoundReg + 1);
    It.advanceToLowerBound(NextRegIndex);
  };
  for (auto It = CollectFrom.find(FirstRegIndex),
            End = CollectFrom.find(FirstInvalidIndex);
       It != End;) {
    doGetUsedRegs(It);
  }
  for (auto It = CollectFrom.find(FirstVirtualRegIndex),
            End = CollectFrom.end();
       It != End;) {
    doGetUsedRegs(It);
  }
}

//===----------------------------------------------------------------------===//
//            Debug Range Extension Implementation
//===----------------------------------------------------------------------===//

#ifndef NDEBUG
void VarLocBasedLDV::printVarLocInMBB(const MachineFunction &MF,
                                      const VarLocInMBB &V,
                                      const VarLocMap &VarLocIDs,
                                      const char *msg,
                                      raw_ostream &Out) const {
  Out << '\n' << msg << '\n';
  for (const MachineBasicBlock &BB : MF) {
    if (!V.count(&BB))
      continue;
    const VarLocSet &L = getVarLocsInMBB(&BB, V);
    if (L.empty())
      continue;
    SmallVector<VarLoc, 32> VarLocs;
    collectAllVarLocs(VarLocs, L, VarLocIDs);
    Out << "MBB: " << BB.getNumber() << ":\n";
    for (const VarLoc &VL : VarLocs) {
      Out << " Var: " << VL.Var.getVariable()->getName();
      Out << " MI: ";
      VL.dump(TRI, TII, Out);
    }
  }
  Out << "\n";
}
#endif

VarLocBasedLDV::VarLoc::SpillLoc
VarLocBasedLDV::extractSpillBaseRegAndOffset(const MachineInstr &MI) {
  assert(MI.hasOneMemOperand() &&
         "Spill instruction does not have exactly one memory operand?");
  auto MMOI = MI.memoperands_begin();
  const PseudoSourceValue *PVal = (*MMOI)->getPseudoValue();
  assert(PVal->kind() == PseudoSourceValue::FixedStack &&
         "Inconsistent memory operand in spill instruction");
  int FI = cast<FixedStackPseudoSourceValue>(PVal)->getFrameIndex();
  const MachineBasicBlock *MBB = MI.getParent();
  Register Reg;
  StackOffset Offset = TFI->getFrameIndexReference(*MBB->getParent(), FI, Reg);
  return {Reg, Offset};
}

/// Do cleanup of \p EntryValTransfers created by \p TRInst, by removing the
/// Transfer, which uses the to-be-deleted \p EntryVL.
void VarLocBasedLDV::cleanupEntryValueTransfers(
    const MachineInstr *TRInst, OpenRangesSet &OpenRanges, VarLocMap &VarLocIDs,
    const VarLoc &EntryVL, InstToEntryLocMap &EntryValTransfers) {
  if (EntryValTransfers.empty() || TRInst == nullptr)
    return;

  auto TransRange = EntryValTransfers.equal_range(TRInst);
  for (auto &TDPair : llvm::make_range(TransRange)) {
    const VarLoc &EmittedEV = VarLocIDs[TDPair.second];
    if (std::tie(EntryVL.Var, EntryVL.Locs[0].Value.RegNo, EntryVL.Expr) ==
        std::tie(EmittedEV.Var, EmittedEV.Locs[0].Value.RegNo,
                 EmittedEV.Expr)) {
      OpenRanges.erase(EmittedEV);
      EntryValTransfers.erase(TRInst);
      break;
    }
  }
}

/// Try to salvage the debug entry value if we encounter a new debug value
/// describing the same parameter, otherwise stop tracking the value: erase
/// the entry value from the open ranges and clean up any Entry Value
/// Transfers that were emitted for it.
1344void VarLocBasedLDV::removeEntryValue(const MachineInstr &MI,
1345 OpenRangesSet &OpenRanges,
1346 VarLocMap &VarLocIDs,
1347 const VarLoc &EntryVL,
1348 InstToEntryLocMap &EntryValTransfers,
1349 RegDefToInstMap &RegSetInstrs) {
1350 // Skip the DBG_VALUE which is the debug entry value itself.
1351 if (&MI == &EntryVL.MI)
1352 return;
1353
1354 // If the parameter's location is not register location, we can not track
1355 // the entry value any more. It doesn't have the TransferInst which defines
1356 // register, so no Entry Value Transfers have been emitted already.
1357 if (!MI.getDebugOperand(Index: 0).isReg())
1358 return;
1359
  // Try to get the non-debug instruction responsible for the DBG_VALUE.
  Register Reg = MI.getDebugOperand(0).getReg();
  const MachineInstr *TransferInst =
      Reg.isValid() ? RegSetInstrs.lookup(Reg) : nullptr;

  // Case of the parameter's DBG_VALUE at the start of the entry MBB: no
  // non-debug instruction has defined the register yet, so the entry value is
  // still valid.
  if (!TransferInst && !LastNonDbgMI && MI.getParent()->isEntryBlock())
    return;
1368
  // If the debug expression of the DBG_VALUE is non-empty, the parameter's
  // value has presumably changed, so we should stop tracking its entry value.
  // Otherwise, check whether the defining instruction merely copies the entry
  // value around.
  if (MI.getDebugExpression()->getNumElements() == 0 && TransferInst) {
1373 // If the DBG_VALUE comes from a copy instruction that copies the entry
1374 // value, it means the parameter's value has not changed and we should be
1375 // able to use its entry value.
1376 // TODO: Try to keep tracking of an entry value if we encounter a propagated
1377 // DBG_VALUE describing the copy of the entry value. (Propagated entry value
1378 // does not indicate the parameter modification.)
    auto DestSrc = TII->isCopyLikeInstr(*TransferInst);
1380 if (DestSrc) {
1381 const MachineOperand *SrcRegOp, *DestRegOp;
1382 SrcRegOp = DestSrc->Source;
1383 DestRegOp = DestSrc->Destination;
1384 if (Reg == DestRegOp->getReg()) {
1385 for (uint64_t ID : OpenRanges.getEntryValueBackupVarLocs()) {
1386 const VarLoc &VL = VarLocIDs[LocIndex::fromRawInteger(ID)];
1387 if (VL.isEntryValueCopyBackupReg(Reg) &&
1388 // Entry Values should not be variadic.
              VL.MI.getDebugOperand(0).getReg() == SrcRegOp->getReg())
1390 return;
1391 }
1392 }
1393 }
1394 }
1395
1396 LLVM_DEBUG(dbgs() << "Deleting a DBG entry value because of: ";
1397 MI.print(dbgs(), /*IsStandalone*/ false,
1398 /*SkipOpers*/ false, /*SkipDebugLoc*/ false,
1399 /*AddNewLine*/ true, TII));
  cleanupEntryValueTransfers(TransferInst, OpenRanges, VarLocIDs, EntryVL,
                             EntryValTransfers);
  OpenRanges.erase(EntryVL);
1403}
1404
/// End all previous ranges related to \p MI and start a new range from \p MI
/// if it is a DBG_VALUE instruction.
1407void VarLocBasedLDV::transferDebugValue(const MachineInstr &MI,
1408 OpenRangesSet &OpenRanges,
1409 VarLocMap &VarLocIDs,
1410 InstToEntryLocMap &EntryValTransfers,
1411 RegDefToInstMap &RegSetInstrs) {
1412 if (!MI.isDebugValue())
1413 return;
1414 const DILocalVariable *Var = MI.getDebugVariable();
1415 const DIExpression *Expr = MI.getDebugExpression();
1416 const DILocation *DebugLoc = MI.getDebugLoc();
1417 const DILocation *InlinedAt = DebugLoc->getInlinedAt();
1418 assert(Var->isValidLocationForIntrinsic(DebugLoc) &&
1419 "Expected inlined-at fields to agree");
1420
1421 DebugVariable V(Var, Expr, InlinedAt);
1422
1423 // Check if this DBG_VALUE indicates a parameter's value changing.
1424 // If that is the case, we should stop tracking its entry value.
  auto EntryValBackupID = OpenRanges.getEntryValueBackup(V);
1426 if (Var->isParameter() && EntryValBackupID) {
1427 const VarLoc &EntryVL = VarLocIDs[EntryValBackupID->back()];
1428 removeEntryValue(MI, OpenRanges, VarLocIDs, EntryVL, EntryValTransfers,
1429 RegSetInstrs);
1430 }
1431
  if (all_of(MI.debug_operands(), [](const MachineOperand &MO) {
        return (MO.isReg() && MO.getReg()) || MO.isImm() || MO.isFPImm() ||
               MO.isCImm() || MO.isTargetIndex();
      })) {
1436 // Use normal VarLoc constructor for registers and immediates.
1437 VarLoc VL(MI);
1438 // End all previous ranges of VL.Var.
1439 OpenRanges.erase(VL);
1440
1441 LocIndices IDs = VarLocIDs.insert(VL);
1442 // Add the VarLoc to OpenRanges from this DBG_VALUE.
    OpenRanges.insert(IDs, VL);
  } else if (!MI.memoperands().empty()) {
1445 llvm_unreachable("DBG_VALUE with mem operand encountered after regalloc?");
1446 } else {
1447 // This must be an undefined location. If it has an open range, erase it.
1448 assert(MI.isUndefDebugValue() &&
1449 "Unexpected non-undef DBG_VALUE encountered");
1450 VarLoc VL(MI);
1451 OpenRanges.erase(VL);
1452 }
1453}
1454
// TODO: This should be removed later; it doesn't fit the new design.
1456void VarLocBasedLDV::collectAllVarLocs(SmallVectorImpl<VarLoc> &Collected,
1457 const VarLocSet &CollectFrom,
1458 const VarLocMap &VarLocIDs) {
  // The half-open interval [FirstIndex, FirstInvalidIndex) contains all
  // possible VarLoc IDs stored in the universal location bucket.
  uint64_t FirstIndex = LocIndex::rawIndexForReg(LocIndex::kUniversalLocation);
  uint64_t FirstInvalidIndex =
      LocIndex::rawIndexForReg(LocIndex::kUniversalLocation + 1);
1465 // Iterate through that half-open interval and collect all the set IDs.
  for (auto It = CollectFrom.find(FirstIndex), End = CollectFrom.end();
       It != End && *It < FirstInvalidIndex; ++It) {
    LocIndex RegIdx = LocIndex::fromRawInteger(*It);
    Collected.push_back(VarLocIDs[RegIdx]);
1470 }
1471}
1472
1473/// Turn the entry value backup locations into primary locations.
1474void VarLocBasedLDV::emitEntryValues(MachineInstr &MI,
1475 OpenRangesSet &OpenRanges,
1476 VarLocMap &VarLocIDs,
1477 InstToEntryLocMap &EntryValTransfers,
1478 VarLocsInRange &KillSet) {
1479 // Do not insert entry value locations after a terminator.
1480 if (MI.isTerminator())
1481 return;
1482
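  // For each variable location closed by this clobber, check whether the
  // variable is a parameter with a recorded entry-value backup; if so, emit
  // the backup as a primary location from this point on.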
1483 for (uint32_t ID : KillSet) {
1484 // The KillSet IDs are indices for the universal location bucket.
1485 LocIndex Idx = LocIndex(LocIndex::kUniversalLocation, ID);
1486 const VarLoc &VL = VarLocIDs[Idx];
1487 if (!VL.Var.getVariable()->isParameter())
1488 continue;
1489
1490 auto DebugVar = VL.Var;
1491 std::optional<LocIndices> EntryValBackupIDs =
        OpenRanges.getEntryValueBackup(DebugVar);
1493
1494 // If the parameter has the entry value backup, it means we should
1495 // be able to use its entry value.
1496 if (!EntryValBackupIDs)
1497 continue;
1498
1499 const VarLoc &EntryVL = VarLocIDs[EntryValBackupIDs->back()];
    VarLoc EntryLoc = VarLoc::CreateEntryLoc(EntryVL.MI, EntryVL.Expr,
                                             EntryVL.Locs[0].Value.RegNo);
    LocIndices EntryValueIDs = VarLocIDs.insert(EntryLoc);
    assert(EntryValueIDs.size() == 1 &&
           "EntryValue loc should not be variadic");
    EntryValTransfers.insert({&MI, EntryValueIDs.back()});
    OpenRanges.insert(EntryValueIDs, EntryLoc);
1507 }
1508}
1509
/// Create a new TransferDebugPair and insert it in \p Transfers. The VarLoc
/// with \p OldVarID should be deleted from \p OpenRanges and replaced with a
/// new VarLoc. If \p NewReg is different from the default zero value, the new
/// location will be a register location created by the copy-like instruction;
/// otherwise, it is the variable's location on the stack.
1515void VarLocBasedLDV::insertTransferDebugPair(
1516 MachineInstr &MI, OpenRangesSet &OpenRanges, TransferMap &Transfers,
1517 VarLocMap &VarLocIDs, LocIndex OldVarID, TransferKind Kind,
1518 const VarLoc::MachineLoc &OldLoc, Register NewReg) {
1519 const VarLoc &OldVarLoc = VarLocIDs[OldVarID];
1520
1521 auto ProcessVarLoc = [&MI, &OpenRanges, &Transfers, &VarLocIDs](VarLoc &VL) {
1522 LocIndices LocIds = VarLocIDs.insert(VL);
1523
1524 // Close this variable's previous location range.
1525 OpenRanges.erase(VL);
1526
1527 // Record the new location as an open range, and a postponed transfer
1528 // inserting a DBG_VALUE for this location.
    OpenRanges.insert(LocIds, VL);
    assert(!MI.isTerminator() && "Cannot insert DBG_VALUE after terminator");
    TransferDebugPair MIP = {&MI, LocIds.back()};
    Transfers.push_back(MIP);
  };

  // End all previous ranges of VL.Var.
  OpenRanges.erase(VarLocIDs[OldVarID]);
1537 switch (Kind) {
1538 case TransferKind::TransferCopy: {
1539 assert(NewReg &&
1540 "No register supplied when handling a copy of a debug value");
1541 // Create a DBG_VALUE instruction to describe the Var in its new
1542 // register location.
    VarLoc VL = VarLoc::CreateCopyLoc(OldVarLoc, OldLoc, NewReg);
1544 ProcessVarLoc(VL);
1545 LLVM_DEBUG({
1546 dbgs() << "Creating VarLoc for register copy:";
1547 VL.dump(TRI, TII);
1548 });
1549 return;
1550 }
1551 case TransferKind::TransferSpill: {
1552 // Create a DBG_VALUE instruction to describe the Var in its spilled
1553 // location.
1554 VarLoc::SpillLoc SpillLocation = extractSpillBaseRegAndOffset(MI);
    VarLoc VL = VarLoc::CreateSpillLoc(
        OldVarLoc, OldLoc, SpillLocation.SpillBase, SpillLocation.SpillOffset);
1557 ProcessVarLoc(VL);
1558 LLVM_DEBUG({
1559 dbgs() << "Creating VarLoc for spill:";
1560 VL.dump(TRI, TII);
1561 });
1562 return;
1563 }
1564 case TransferKind::TransferRestore: {
1565 assert(NewReg &&
1566 "No register supplied when handling a restore of a debug value");
1567 // DebugInstr refers to the pre-spill location, therefore we can reuse
1568 // its expression.
    VarLoc VL = VarLoc::CreateCopyLoc(OldVarLoc, OldLoc, NewReg);
1570 ProcessVarLoc(VL);
1571 LLVM_DEBUG({
1572 dbgs() << "Creating VarLoc for restore:";
1573 VL.dump(TRI, TII);
1574 });
1575 return;
1576 }
1577 }
1578 llvm_unreachable("Invalid transfer kind");
1579}
1580
1581/// A definition of a register may mark the end of a range.
1582void VarLocBasedLDV::transferRegisterDef(MachineInstr &MI,
1583 OpenRangesSet &OpenRanges,
1584 VarLocMap &VarLocIDs,
1585 InstToEntryLocMap &EntryValTransfers,
1586 RegDefToInstMap &RegSetInstrs) {
1587
1588 // Meta Instructions do not affect the debug liveness of any register they
1589 // define.
1590 if (MI.isMetaInstruction())
1591 return;
1592
1593 MachineFunction *MF = MI.getMF();
1594 const TargetLowering *TLI = MF->getSubtarget().getTargetLowering();
1595 Register SP = TLI->getStackPointerRegisterToSaveRestore();
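  // The stack pointer is special-cased below: call instructions are assumed
  // not to clobber SP, so neither a call's def of SP nor its regmask
  // terminates SP-based locations.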
1596
1597 // Find the regs killed by MI, and find regmasks of preserved regs.
1598 DefinedRegsSet DeadRegs;
1599 SmallVector<const uint32_t *, 4> RegMasks;
1600 for (const MachineOperand &MO : MI.operands()) {
1601 // Determine whether the operand is a register def.
1602 if (MO.isReg() && MO.isDef() && MO.getReg() && MO.getReg().isPhysical() &&
1603 !(MI.isCall() && MO.getReg() == SP)) {
1604 // Remove ranges of all aliased registers.
1605 for (MCRegAliasIterator RAI(MO.getReg(), TRI, true); RAI.isValid(); ++RAI)
1606 // FIXME: Can we break out of this loop early if no insertion occurs?
        DeadRegs.insert((*RAI).id());
      RegSetInstrs.erase(MO.getReg());
      RegSetInstrs.insert({MO.getReg(), &MI});
    } else if (MO.isRegMask()) {
      RegMasks.push_back(MO.getRegMask());
1612 }
1613 }
1614
1615 // Erase VarLocs which reside in one of the dead registers. For performance
1616 // reasons, it's critical to not iterate over the full set of open VarLocs.
1617 // Iterate over the set of dying/used regs instead.
1618 if (!RegMasks.empty()) {
1619 SmallVector<Register, 32> UsedRegs;
    getUsedRegs(OpenRanges.getVarLocs(), UsedRegs);
1621 for (Register Reg : UsedRegs) {
1622 // Remove ranges of all clobbered registers. Register masks don't usually
1623 // list SP as preserved. Assume that call instructions never clobber SP,
1624 // because some backends (e.g., AArch64) never list SP in the regmask.
1625 // While the debug info may be off for an instruction or two around
1626 // callee-cleanup calls, transferring the DEBUG_VALUE across the call is
1627 // still a better user experience.
1628 if (Reg == SP)
1629 continue;
      bool AnyRegMaskKillsReg =
          any_of(RegMasks, [Reg](const uint32_t *RegMask) {
            return MachineOperand::clobbersPhysReg(RegMask, Reg);
          });
      if (AnyRegMaskKillsReg) {
        DeadRegs.insert(Reg);
        RegSetInstrs.erase(Reg);
        RegSetInstrs.insert({Reg, &MI});
      }
1640 }
1641 }
1642
1643 if (DeadRegs.empty())
1644 return;
1645
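  // Collect and close every open range that lives in one of the clobbered
  // registers; entry values may then be emitted to recover parameter
  // locations.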
1646 VarLocsInRange KillSet;
  collectIDsForRegs(KillSet, DeadRegs, OpenRanges.getVarLocs(), VarLocIDs);
  OpenRanges.erase(KillSet, VarLocIDs, LocIndex::kUniversalLocation);
1649
1650 if (ShouldEmitDebugEntryValues)
1651 emitEntryValues(MI, OpenRanges, VarLocIDs, EntryValTransfers, KillSet);
1652}
1653
1654void VarLocBasedLDV::transferWasmDef(MachineInstr &MI,
1655 OpenRangesSet &OpenRanges,
1656 VarLocMap &VarLocIDs) {
1657 // If this is not a Wasm local.set or local.tee, which sets local values,
1658 // return.
1659 int Index;
1660 int64_t Offset;
1661 if (!TII->isExplicitTargetIndexDef(MI, Index, Offset))
1662 return;
1663
1664 // Find the target indices killed by MI, and delete those variable locations
1665 // from the open range.
1666 VarLocsInRange KillSet;
  VarLoc::WasmLoc Loc{Index, Offset};
1668 for (uint64_t ID : OpenRanges.getWasmVarLocs()) {
1669 LocIndex Idx = LocIndex::fromRawInteger(ID);
1670 const VarLoc &VL = VarLocIDs[Idx];
1671 assert(VL.containsWasmLocs() && "Broken VarLocSet?");
    if (VL.usesWasmLoc(Loc))
      KillSet.insert(ID);
  }
  OpenRanges.erase(KillSet, VarLocIDs, LocIndex::kWasmLocation);
1676}
1677
1678bool VarLocBasedLDV::isSpillInstruction(const MachineInstr &MI,
1679 MachineFunction *MF) {
1680 // TODO: Handle multiple stores folded into one.
1681 if (!MI.hasOneMemOperand())
1682 return false;
1683
  // This is not a spill instruction, since no valid size was returned from
  // either function.
  if (!MI.getSpillSize(TII) && !MI.getFoldedSpillSize(TII))
    return false;
1687
1688 return true;
1689}
1690
1691bool VarLocBasedLDV::isLocationSpill(const MachineInstr &MI,
1692 MachineFunction *MF, Register &Reg) {
1693 if (!isSpillInstruction(MI, MF))
1694 return false;
1695
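  // Helper: if MO is a register use, report the register through Reg and
  // return whether the operand carries a kill flag; otherwise clear Reg and
  // return false.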
1696 auto isKilledReg = [&](const MachineOperand MO, Register &Reg) {
1697 if (!MO.isReg() || !MO.isUse()) {
1698 Reg = 0;
1699 return false;
1700 }
1701 Reg = MO.getReg();
1702 return MO.isKill();
1703 };
1704
1705 for (const MachineOperand &MO : MI.operands()) {
1706 // In a spill instruction generated by the InlineSpiller the spilled
1707 // register has its kill flag set.
1708 if (isKilledReg(MO, Reg))
1709 return true;
1710 if (Reg != 0) {
1711 // Check whether next instruction kills the spilled register.
1712 // FIXME: Current solution does not cover search for killed register in
1713 // bundles and instructions further down the chain.
      auto NextI = std::next(MI.getIterator());
1715 // Skip next instruction that points to basic block end iterator.
1716 if (MI.getParent()->end() == NextI)
1717 continue;
1718 Register RegNext;
1719 for (const MachineOperand &MONext : NextI->operands()) {
1720 // Return true if we came across the register from the
1721 // previous spill instruction that is killed in NextI.
1722 if (isKilledReg(MONext, RegNext) && RegNext == Reg)
1723 return true;
1724 }
1725 }
1726 }
1727 // Return false if we didn't find spilled register.
1728 return false;
1729}
1730
1731std::optional<VarLocBasedLDV::VarLoc::SpillLoc>
1732VarLocBasedLDV::isRestoreInstruction(const MachineInstr &MI,
1733 MachineFunction *MF, Register &Reg) {
1734 if (!MI.hasOneMemOperand())
1735 return std::nullopt;
1736
1737 // FIXME: Handle folded restore instructions with more than one memory
1738 // operand.
1739 if (MI.getRestoreSize(TII)) {
    Reg = MI.getOperand(0).getReg();
1741 return extractSpillBaseRegAndOffset(MI);
1742 }
1743 return std::nullopt;
1744}
1745
1746/// A spilled register may indicate that we have to end the current range of
1747/// a variable and create a new one for the spill location.
1748/// A restored register may indicate the reverse situation.
1749/// We don't want to insert any instructions in process(), so we just create
1750/// the DBG_VALUE without inserting it and keep track of it in \p Transfers.
1751/// It will be inserted into the BB when we're done iterating over the
1752/// instructions.
1753void VarLocBasedLDV::transferSpillOrRestoreInst(MachineInstr &MI,
1754 OpenRangesSet &OpenRanges,
1755 VarLocMap &VarLocIDs,
1756 TransferMap &Transfers) {
1757 MachineFunction *MF = MI.getMF();
1758 TransferKind TKind;
1759 Register Reg;
1760 std::optional<VarLoc::SpillLoc> Loc;
1761
1762 LLVM_DEBUG(dbgs() << "Examining instruction: "; MI.dump(););
1763
1764 // First, if there are any DBG_VALUEs pointing at a spill slot that is
1765 // written to, then close the variable location. The value in memory
1766 // will have changed.
1767 VarLocsInRange KillSet;
1768 if (isSpillInstruction(MI, MF)) {
1769 Loc = extractSpillBaseRegAndOffset(MI);
1770 for (uint64_t ID : OpenRanges.getSpillVarLocs()) {
1771 LocIndex Idx = LocIndex::fromRawInteger(ID);
1772 const VarLoc &VL = VarLocIDs[Idx];
1773 assert(VL.containsSpillLocs() && "Broken VarLocSet?");
      if (VL.usesSpillLoc(*Loc)) {
        // This location is overwritten by the current instruction -- terminate
        // the open range, and insert an explicit DBG_VALUE $noreg.
        //
        // Doing this at a later stage would require re-interpreting all
        // DBG_VALUEs and DIExpressions to identify whether they point at
        // memory, and then analysing all memory writes to see if they
        // overwrite that memory, which is expensive.
        //
        // At this stage, we already know which DBG_VALUEs are for spills and
        // where they are located; it's best to handle overwrites now.
        KillSet.insert(ID);
        unsigned SpillLocIdx = VL.getSpillLocIdx(*Loc);
        VarLoc::MachineLoc OldLoc = VL.Locs[SpillLocIdx];
        VarLoc UndefVL = VarLoc::CreateCopyLoc(VL, OldLoc, 0);
        LocIndices UndefLocIDs = VarLocIDs.insert(UndefVL);
        Transfers.push_back({&MI, UndefLocIDs.back()});
1791 }
1792 }
    OpenRanges.erase(KillSet, VarLocIDs, LocIndex::kSpillLocation);
1794 }
1795
1796 // Try to recognise spill and restore instructions that may create a new
1797 // variable location.
1798 if (isLocationSpill(MI, MF, Reg)) {
1799 TKind = TransferKind::TransferSpill;
1800 LLVM_DEBUG(dbgs() << "Recognized as spill: "; MI.dump(););
1801 LLVM_DEBUG(dbgs() << "Register: " << Reg.id() << " " << printReg(Reg, TRI)
1802 << "\n");
1803 } else {
1804 if (!(Loc = isRestoreInstruction(MI, MF, Reg)))
1805 return;
1806 TKind = TransferKind::TransferRestore;
1807 LLVM_DEBUG(dbgs() << "Recognized as restore: "; MI.dump(););
1808 LLVM_DEBUG(dbgs() << "Register: " << Reg.id() << " " << printReg(Reg, TRI)
1809 << "\n");
1810 }
1811 // Check if the register or spill location is the location of a debug value.
1812 auto TransferCandidates = OpenRanges.getEmptyVarLocRange();
1813 if (TKind == TransferKind::TransferSpill)
1814 TransferCandidates = OpenRanges.getRegisterVarLocs(Reg);
1815 else if (TKind == TransferKind::TransferRestore)
1816 TransferCandidates = OpenRanges.getSpillVarLocs();
1817 for (uint64_t ID : TransferCandidates) {
1818 LocIndex Idx = LocIndex::fromRawInteger(ID);
1819 const VarLoc &VL = VarLocIDs[Idx];
1820 unsigned LocIdx;
1821 if (TKind == TransferKind::TransferSpill) {
1822 assert(VL.usesReg(Reg) && "Broken VarLocSet?");
1823 LLVM_DEBUG(dbgs() << "Spilling Register " << printReg(Reg, TRI) << '('
1824 << VL.Var.getVariable()->getName() << ")\n");
1825 LocIdx = VL.getRegIdx(Reg);
1826 } else {
1827 assert(TKind == TransferKind::TransferRestore && VL.containsSpillLocs() &&
1828 "Broken VarLocSet?");
      if (!VL.usesSpillLoc(*Loc))
        // The spill location is not the location of a debug value.
        continue;
      LLVM_DEBUG(dbgs() << "Restoring Register " << printReg(Reg, TRI) << '('
                        << VL.Var.getVariable()->getName() << ")\n");
      LocIdx = VL.getSpillLocIdx(*Loc);
    }
    VarLoc::MachineLoc MLoc = VL.Locs[LocIdx];
    insertTransferDebugPair(MI, OpenRanges, Transfers, VarLocIDs, Idx, TKind,
                            MLoc, Reg);
1839 // FIXME: A comment should explain why it's correct to return early here,
1840 // if that is in fact correct.
1841 return;
1842 }
1843}
1844
/// If \p MI is a register copy instruction that copies a previously tracked
/// value from one register to another callee-saved register, create a new
/// DBG_VALUE instruction describing the variable in the copy's destination
/// register.
1848void VarLocBasedLDV::transferRegisterCopy(MachineInstr &MI,
1849 OpenRangesSet &OpenRanges,
1850 VarLocMap &VarLocIDs,
1851 TransferMap &Transfers) {
1852 auto DestSrc = TII->isCopyLikeInstr(MI);
1853 if (!DestSrc)
1854 return;
1855
1856 const MachineOperand *DestRegOp = DestSrc->Destination;
1857 const MachineOperand *SrcRegOp = DestSrc->Source;
1858
1859 if (!DestRegOp->isDef())
1860 return;
1861
1862 auto isCalleeSavedReg = [&](Register Reg) {
1863 for (MCRegAliasIterator RAI(Reg, TRI, true); RAI.isValid(); ++RAI)
      if (CalleeSavedRegs.test((*RAI).id()))
1865 return true;
1866 return false;
1867 };
1868
1869 Register SrcReg = SrcRegOp->getReg();
1870 Register DestReg = DestRegOp->getReg();
1871
  // We only want to recognize copies whose destination register is callee
  // saved. A destination register that could be clobbered by a call would
  // likely be clobbered soon, whereas a callee-saved register is likely to
  // stay unclobbered longer, even if the source register is killed.
1877 if (!isCalleeSavedReg(DestReg))
1878 return;
1879
  // Remember an entry value movement. If we encounter a new debug value of
  // a parameter describing only a move of the value around, rather than a
  // modification of it, we are still able to use the entry value if needed.
  if (isRegOtherThanSPAndFP(*DestRegOp, MI, TRI)) {
1884 for (uint64_t ID : OpenRanges.getEntryValueBackupVarLocs()) {
1885 LocIndex Idx = LocIndex::fromRawInteger(ID);
1886 const VarLoc &VL = VarLocIDs[Idx];
1887 if (VL.isEntryValueBackupReg(Reg: SrcReg)) {
1888 LLVM_DEBUG(dbgs() << "Copy of the entry value: "; MI.dump(););
1889 VarLoc EntryValLocCopyBackup =
            VarLoc::CreateEntryCopyBackupLoc(VL.MI, VL.Expr, DestReg);
1891 // Stop tracking the original entry value.
1892 OpenRanges.erase(VL);
1893
1894 // Start tracking the entry value copy.
        LocIndices EntryValCopyLocIDs = VarLocIDs.insert(EntryValLocCopyBackup);
        OpenRanges.insert(EntryValCopyLocIDs, EntryValLocCopyBackup);
1897 break;
1898 }
1899 }
1900 }
1901
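  // Only transfer the location if the source register is killed by the copy;
  // otherwise the open range in the source register simply remains open.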
1902 if (!SrcRegOp->isKill())
1903 return;
1904
  for (uint64_t ID : OpenRanges.getRegisterVarLocs(SrcReg)) {
1906 LocIndex Idx = LocIndex::fromRawInteger(ID);
1907 assert(VarLocIDs[Idx].usesReg(SrcReg) && "Broken VarLocSet?");
1908 VarLoc::MachineLocValue Loc;
1909 Loc.RegNo = SrcReg;
    VarLoc::MachineLoc MLoc{VarLoc::MachineLocKind::RegisterKind, Loc};
    insertTransferDebugPair(MI, OpenRanges, Transfers, VarLocIDs, Idx,
                            TransferKind::TransferCopy, MLoc, DestReg);
1913 // FIXME: A comment should explain why it's correct to return early here,
1914 // if that is in fact correct.
1915 return;
1916 }
1917}
1918
1919/// Terminate all open ranges at the end of the current basic block.
1920bool VarLocBasedLDV::transferTerminator(MachineBasicBlock *CurMBB,
1921 OpenRangesSet &OpenRanges,
1922 VarLocInMBB &OutLocs,
1923 const VarLocMap &VarLocIDs) {
1924 bool Changed = false;
1925 LLVM_DEBUG({
1926 VarVec VarLocs;
1927 OpenRanges.getUniqueVarLocs(VarLocs, VarLocIDs);
1928 for (VarLoc &VL : VarLocs) {
1929 // Copy OpenRanges to OutLocs, if not already present.
1930 dbgs() << "Add to OutLocs in MBB #" << CurMBB->getNumber() << ": ";
1931 VL.dump(TRI, TII);
1932 }
1933 });
  VarLocSet &VLS = getVarLocsInMBB(CurMBB, OutLocs);
1935 Changed = VLS != OpenRanges.getVarLocs();
1936 // New OutLocs set may be different due to spill, restore or register
1937 // copy instruction processing.
1938 if (Changed)
1939 VLS = OpenRanges.getVarLocs();
1940 OpenRanges.clear();
1941 return Changed;
1942}
1943
1944/// Accumulate a mapping between each DILocalVariable fragment and other
1945/// fragments of that DILocalVariable which overlap. This reduces work during
1946/// the data-flow stage from "Find any overlapping fragments" to "Check if the
1947/// known-to-overlap fragments are present".
1948/// \param MI A previously unprocessed DEBUG_VALUE instruction to analyze for
1949/// fragment usage.
1950/// \param SeenFragments Map from DILocalVariable to all fragments of that
1951/// Variable which are known to exist.
1952/// \param OverlappingFragments The overlap map being constructed, from one
1953/// Var/Fragment pair to a vector of fragments known to overlap.
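/// For example, if a fragment covering bits [0, 32) and another covering bits
/// [16, 32) of the same variable are seen, each is recorded as overlapping the
/// other, so a later DBG_VALUE for one fragment knows which other open ranges
/// to consider.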
1954void VarLocBasedLDV::accumulateFragmentMap(MachineInstr &MI,
1955 VarToFragments &SeenFragments,
1956 OverlapMap &OverlappingFragments) {
1957 DebugVariable MIVar(MI.getDebugVariable(), MI.getDebugExpression(),
1958 MI.getDebugLoc()->getInlinedAt());
1959 FragmentInfo ThisFragment = MIVar.getFragmentOrDefault();
1960
1961 // If this is the first sighting of this variable, then we are guaranteed
1962 // there are currently no overlapping fragments either. Initialize the set
1963 // of seen fragments, record no overlaps for the current one, and return.
  auto [SeenIt, Inserted] = SeenFragments.try_emplace(MIVar.getVariable());
  if (Inserted) {
    SeenIt->second.insert(ThisFragment);

    OverlappingFragments.insert({{MIVar.getVariable(), ThisFragment}, {}});
1969 return;
1970 }
1971
1972 // If this particular Variable/Fragment pair already exists in the overlap
1973 // map, it has already been accounted for.
1974 auto IsInOLapMap =
      OverlappingFragments.insert({{MIVar.getVariable(), ThisFragment}, {}});
1976 if (!IsInOLapMap.second)
1977 return;
1978
1979 auto &ThisFragmentsOverlaps = IsInOLapMap.first->second;
1980 auto &AllSeenFragments = SeenIt->second;
1981
1982 // Otherwise, examine all other seen fragments for this variable, with "this"
1983 // fragment being a previously unseen fragment. Record any pair of
1984 // overlapping fragments.
1985 for (const auto &ASeenFragment : AllSeenFragments) {
1986 // Does this previously seen fragment overlap?
    if (DIExpression::fragmentsOverlap(ThisFragment, ASeenFragment)) {
      // Yes: Mark the current fragment as being overlapped.
      ThisFragmentsOverlaps.push_back(ASeenFragment);
      // Mark the previously seen fragment as being overlapped by the current
      // one.
      auto ASeenFragmentsOverlaps =
          OverlappingFragments.find({MIVar.getVariable(), ASeenFragment});
      assert(ASeenFragmentsOverlaps != OverlappingFragments.end() &&
             "Previously seen var fragment has no vector of overlaps");
      ASeenFragmentsOverlaps->second.push_back(ThisFragment);
1997 }
1998 }
1999
  AllSeenFragments.insert(ThisFragment);
2001}
2002
/// Process a single machine instruction: update OpenRanges and record any
/// location transfers it gives rise to.
2004void VarLocBasedLDV::process(MachineInstr &MI, OpenRangesSet &OpenRanges,
2005 VarLocMap &VarLocIDs, TransferMap &Transfers,
2006 InstToEntryLocMap &EntryValTransfers,
2007 RegDefToInstMap &RegSetInstrs) {
2008 if (!MI.isDebugInstr())
2009 LastNonDbgMI = &MI;
2010 transferDebugValue(MI, OpenRanges, VarLocIDs, EntryValTransfers,
2011 RegSetInstrs);
2012 transferRegisterDef(MI, OpenRanges, VarLocIDs, EntryValTransfers,
2013 RegSetInstrs);
2014 transferWasmDef(MI, OpenRanges, VarLocIDs);
2015 transferRegisterCopy(MI, OpenRanges, VarLocIDs, Transfers);
2016 transferSpillOrRestoreInst(MI, OpenRanges, VarLocIDs, Transfers);
2017}
2018
/// This routine joins the analysis results of all incoming edges in \p MBB by
/// intersecting the live-out VarLoc sets of the visited predecessors: a
/// variable location is live into \p MBB only if the same source variable
/// resides in the same location in all of those predecessors.
2022bool VarLocBasedLDV::join(
2023 MachineBasicBlock &MBB, VarLocInMBB &OutLocs, VarLocInMBB &InLocs,
2024 const VarLocMap &VarLocIDs,
2025 SmallPtrSet<const MachineBasicBlock *, 16> &Visited,
2026 SmallPtrSetImpl<const MachineBasicBlock *> &ArtificialBlocks) {
2027 LLVM_DEBUG(dbgs() << "join MBB: " << MBB.getNumber() << "\n");
2028
2029 VarLocSet InLocsT(Alloc); // Temporary incoming locations.
2030
2031 // For all predecessors of this MBB, find the set of VarLocs that
2032 // can be joined.
2033 int NumVisited = 0;
2034 for (auto *p : MBB.predecessors()) {
2035 // Ignore backedges if we have not visited the predecessor yet. As the
2036 // predecessor hasn't yet had locations propagated into it, most locations
2037 // will not yet be valid, so treat them as all being uninitialized and
2038 // potentially valid. If a location guessed to be correct here is
2039 // invalidated later, we will remove it when we revisit this block.
    if (!Visited.count(p)) {
2041 LLVM_DEBUG(dbgs() << " ignoring unvisited pred MBB: " << p->getNumber()
2042 << "\n");
2043 continue;
2044 }
    auto OL = OutLocs.find(p);
    // The join yields nothing if any visited predecessor has no recorded
    // OutLocs.
    if (OL == OutLocs.end())
      return false;
2049
2050 // Just copy over the Out locs to incoming locs for the first visited
2051 // predecessor, and for all other predecessors join the Out locs.
2052 VarLocSet &OutLocVLS = *OL->second;
2053 if (!NumVisited)
2054 InLocsT = OutLocVLS;
2055 else
2056 InLocsT &= OutLocVLS;
2057
2058 LLVM_DEBUG({
2059 if (!InLocsT.empty()) {
2060 VarVec VarLocs;
2061 collectAllVarLocs(VarLocs, InLocsT, VarLocIDs);
2062 for (const VarLoc &VL : VarLocs)
2063 dbgs() << " gathered candidate incoming var: "
2064 << VL.Var.getVariable()->getName() << "\n";
2065 }
2066 });
2067
2068 NumVisited++;
2069 }
2070
2071 // Filter out DBG_VALUES that are out of scope.
2072 VarLocSet KillSet(Alloc);
  bool IsArtificial = ArtificialBlocks.count(&MBB);
2074 if (!IsArtificial) {
2075 for (uint64_t ID : InLocsT) {
2076 LocIndex Idx = LocIndex::fromRawInteger(ID);
2077 if (!VarLocIDs[Idx].dominates(LS, MBB)) {
2078 KillSet.set(ID);
2079 LLVM_DEBUG({
2080 auto Name = VarLocIDs[Idx].Var.getVariable()->getName();
2081 dbgs() << " killing " << Name << ", it doesn't dominate MBB\n";
2082 });
2083 }
2084 }
2085 }
  InLocsT.intersectWithComplement(KillSet);
2087
2088 // As we are processing blocks in reverse post-order we
2089 // should have processed at least one predecessor, unless it
2090 // is the entry block which has no predecessor.
2091 assert((NumVisited || MBB.pred_empty()) &&
2092 "Should have processed at least one predecessor");
2093
  VarLocSet &ILS = getVarLocsInMBB(&MBB, InLocs);
2095 bool Changed = false;
2096 if (ILS != InLocsT) {
2097 ILS = InLocsT;
2098 Changed = true;
2099 }
2100
2101 return Changed;
2102}
2103
2104void VarLocBasedLDV::flushPendingLocs(VarLocInMBB &PendingInLocs,
2105 VarLocMap &VarLocIDs) {
2106 // PendingInLocs records all locations propagated into blocks, which have
2107 // not had DBG_VALUE insts created. Go through and create those insts now.
2108 for (auto &Iter : PendingInLocs) {
2109 // Map is keyed on a constant pointer, unwrap it so we can insert insts.
2110 auto &MBB = const_cast<MachineBasicBlock &>(*Iter.first);
2111 VarLocSet &Pending = *Iter.second;
2112
2113 SmallVector<VarLoc, 32> VarLocs;
    collectAllVarLocs(VarLocs, Pending, VarLocIDs);
2115
2116 for (VarLoc DiffIt : VarLocs) {
2117 // The ID location is live-in to MBB -- work out what kind of machine
2118 // location it is and create a DBG_VALUE.
2119 if (DiffIt.isEntryBackupLoc())
2120 continue;
      MachineInstr *MI = DiffIt.BuildDbgValue(*MBB.getParent());
      MBB.insert(MBB.instr_begin(), MI);
2123
2124 (void)MI;
2125 LLVM_DEBUG(dbgs() << "Inserted: "; MI->dump(););
2126 }
2127 }
2128}
2129
2130bool VarLocBasedLDV::isEntryValueCandidate(
2131 const MachineInstr &MI, const DefinedRegsSet &DefinedRegs) const {
2132 assert(MI.isDebugValue() && "This must be DBG_VALUE.");
2133
  // TODO: Add support for local variables that are expressed in terms of
  // parameters' entry values.
  // TODO: Add support for modified arguments that can be expressed
  // by using their entry values.
2138 auto *DIVar = MI.getDebugVariable();
2139 if (!DIVar->isParameter())
2140 return false;
2141
2142 // Do not consider parameters that belong to an inlined function.
2143 if (MI.getDebugLoc()->getInlinedAt())
2144 return false;
2145
2146 // Only consider parameters that are described using registers. Parameters
2147 // that are passed on the stack are not yet supported, so ignore debug
2148 // values that are described by the frame or stack pointer.
  if (!isRegOtherThanSPAndFP(MI.getDebugOperand(0), MI, TRI))
2150 return false;
2151
2152 // If a parameter's value has been propagated from the caller, then the
2153 // parameter's DBG_VALUE may be described using a register defined by some
2154 // instruction in the entry block, in which case we shouldn't create an
2155 // entry value.
  if (DefinedRegs.count(MI.getDebugOperand(0).getReg()))
2157 return false;
2158
  // TODO: Add support for parameters that have a pre-existing debug expression
  // (e.g. fragments).
2161 // A simple deref expression is equivalent to an indirect debug value.
2162 const DIExpression *Expr = MI.getDebugExpression();
2163 if (Expr->getNumElements() > 0 && !Expr->isDeref())
2164 return false;
2165
2166 return true;
2167}
2168
2169/// Collect all register defines (including aliases) for the given instruction.
2170static void collectRegDefs(const MachineInstr &MI, DefinedRegsSet &Regs,
2171 const TargetRegisterInfo *TRI) {
2172 for (const MachineOperand &MO : MI.all_defs()) {
2173 if (MO.getReg() && MO.getReg().isPhysical()) {
      Regs.insert(MO.getReg());
      for (MCRegAliasIterator AI(MO.getReg(), TRI, true); AI.isValid(); ++AI)
        Regs.insert(*AI);
2177 }
2178 }
2179}
2180
/// This routine records the entry values of function parameters. The values
/// can be used as backup values: if we lose track of some unmodified
/// parameters, the backup values are used as primary locations.
2184void VarLocBasedLDV::recordEntryValue(const MachineInstr &MI,
2185 const DefinedRegsSet &DefinedRegs,
2186 OpenRangesSet &OpenRanges,
2187 VarLocMap &VarLocIDs) {
2188 if (!ShouldEmitDebugEntryValues)
2189 return;
2190
2191 DebugVariable V(MI.getDebugVariable(), MI.getDebugExpression(),
2192 MI.getDebugLoc()->getInlinedAt());
2193
2194 if (!isEntryValueCandidate(MI, DefinedRegs) ||
      OpenRanges.getEntryValueBackup(V))
2196 return;
2197
2198 LLVM_DEBUG(dbgs() << "Creating the backup entry location: "; MI.dump(););
2199
  // Create the entry value and use it as a backup location. It remains valid
  // until the parameter is modified.
  DIExpression *NewExpr =
      DIExpression::prepend(MI.getDebugExpression(), DIExpression::EntryValue);
  VarLoc EntryValLocAsBackup = VarLoc::CreateEntryBackupLoc(MI, NewExpr);
  LocIndices EntryValLocIDs = VarLocIDs.insert(EntryValLocAsBackup);
  OpenRanges.insert(EntryValLocIDs, EntryValLocAsBackup);
2207}
2208
2209/// Calculate the liveness information for the given machine function and
2210/// extend ranges across basic blocks.
2211bool VarLocBasedLDV::ExtendRanges(MachineFunction &MF,
2212 MachineDominatorTree *DomTree,
2213 bool ShouldEmitDebugEntryValues,
2214 unsigned InputBBLimit,
2215 unsigned InputDbgValLimit) {
2216 (void)DomTree;
2217 LLVM_DEBUG(dbgs() << "\nDebug Range Extension: " << MF.getName() << "\n");
2218
2219 if (!MF.getFunction().getSubprogram())
2220 // VarLocBaseLDV will already have removed all DBG_VALUEs.
2221 return false;
2222
2223 // Skip functions from NoDebug compilation units.
2224 if (MF.getFunction().getSubprogram()->getUnit()->getEmissionKind() ==
2225 DICompileUnit::NoDebug)
2226 return false;
2227
2228 TRI = MF.getSubtarget().getRegisterInfo();
2229 TII = MF.getSubtarget().getInstrInfo();
2230 TFI = MF.getSubtarget().getFrameLowering();
  TFI->getCalleeSaves(MF, CalleeSavedRegs);
2232 this->ShouldEmitDebugEntryValues = ShouldEmitDebugEntryValues;
2233
2234 LS.initialize(MF);
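  // Lexical scope information is used during join() to filter out locations
  // whose variable's scope does not cover the block.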
2235
2236 bool Changed = false;
2237 bool OLChanged = false;
2238 bool MBBJoined = false;
2239
2240 VarLocMap VarLocIDs; // Map VarLoc<>unique ID for use in bitvectors.
2241 OverlapMap OverlapFragments; // Map of overlapping variable fragments.
2242 OpenRangesSet OpenRanges(Alloc, OverlapFragments);
2243 // Ranges that are open until end of bb.
2244 VarLocInMBB OutLocs; // Ranges that exist beyond bb.
2245 VarLocInMBB InLocs; // Ranges that are incoming after joining.
2246 TransferMap Transfers; // DBG_VALUEs associated with transfers (such as
2247 // spills, copies and restores).
2248 // Map responsible MI to attached Transfer emitted from Backup Entry Value.
2249 InstToEntryLocMap EntryValTransfers;
2250 // Map a Register to the last MI which clobbered it.
2251 RegDefToInstMap RegSetInstrs;
2252
2253 VarToFragments SeenFragments;
2254
2255 // Blocks which are artificial, i.e. blocks which exclusively contain
2256 // instructions without locations, or with line 0 locations.
2257 SmallPtrSet<const MachineBasicBlock *, 16> ArtificialBlocks;
2258
2259 DenseMap<unsigned int, MachineBasicBlock *> OrderToBB;
2260 DenseMap<MachineBasicBlock *, unsigned int> BBToOrder;
2261 std::priority_queue<unsigned int, std::vector<unsigned int>,
2262 std::greater<unsigned int>>
2263 Worklist;
2264 std::priority_queue<unsigned int, std::vector<unsigned int>,
2265 std::greater<unsigned int>>
2266 Pending;
2267
2268 // Set of register defines that are seen when traversing the entry block
2269 // looking for debug entry value candidates.
2270 DefinedRegsSet DefinedRegs;
2271
2272 // Only in the case of entry MBB collect DBG_VALUEs representing
2273 // function parameters in order to generate debug entry values for them.
2274 MachineBasicBlock &First_MBB = *(MF.begin());
2275 for (auto &MI : First_MBB) {
    collectRegDefs(MI, DefinedRegs, TRI);
2277 if (MI.isDebugValue())
2278 recordEntryValue(MI, DefinedRegs, OpenRanges, VarLocIDs);
2279 }
2280
2281 // Initialize per-block structures and scan for fragment overlaps.
2282 for (auto &MBB : MF)
2283 for (auto &MI : MBB)
2284 if (MI.isDebugValue())
        accumulateFragmentMap(MI, SeenFragments, OverlapFragments);
2286
2287 auto hasNonArtificialLocation = [](const MachineInstr &MI) -> bool {
2288 if (const DebugLoc &DL = MI.getDebugLoc())
2289 return DL.getLine() != 0;
2290 return false;
2291 };
2292 for (auto &MBB : MF)
    if (none_of(MBB.instrs(), hasNonArtificialLocation))
2294 ArtificialBlocks.insert(Ptr: &MBB);
2295
2296 LLVM_DEBUG(printVarLocInMBB(MF, OutLocs, VarLocIDs,
2297 "OutLocs after initialization", dbgs()));
2298
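  // Number the blocks in reverse post-order so that, in the common case,
  // predecessors are processed before their successors.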
2299 ReversePostOrderTraversal<MachineFunction *> RPOT(&MF);
2300 unsigned int RPONumber = 0;
2301 for (MachineBasicBlock *MBB : RPOT) {
2302 OrderToBB[RPONumber] = MBB;
2303 BBToOrder[MBB] = RPONumber;
    Worklist.push(RPONumber);
2305 ++RPONumber;
2306 }
2307
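  // Bound compile time: if the function has both more blocks and more input
  // DBG_VALUEs than the given limits, skip the analysis entirely.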
2308 if (RPONumber > InputBBLimit) {
2309 unsigned NumInputDbgValues = 0;
2310 for (auto &MBB : MF)
2311 for (auto &MI : MBB)
2312 if (MI.isDebugValue())
2313 ++NumInputDbgValues;
2314 if (NumInputDbgValues > InputDbgValLimit) {
2315 LLVM_DEBUG(dbgs() << "Disabling VarLocBasedLDV: " << MF.getName()
2316 << " has " << RPONumber << " basic blocks and "
2317 << NumInputDbgValues
2318 << " input DBG_VALUEs, exceeding limits.\n");
2319 return false;
2320 }
2321 }
2322
2323 // This is a standard "union of predecessor outs" dataflow problem.
2324 // To solve it, we perform join() and process() using the two worklist method
2325 // until the ranges converge.
2326 // Ranges have converged when both worklists are empty.
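  // Visited records blocks whose live-ins have been computed at least once;
  // join() ignores the live-outs of not-yet-visited (backedge) predecessors.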
2327 SmallPtrSet<const MachineBasicBlock *, 16> Visited;
2328 while (!Worklist.empty() || !Pending.empty()) {
2329 // We track what is on the pending worklist to avoid inserting the same
2330 // thing twice. We could avoid this with a custom priority queue, but this
2331 // is probably not worth it.
2332 SmallPtrSet<MachineBasicBlock *, 16> OnPending;
2333 LLVM_DEBUG(dbgs() << "Processing Worklist\n");
2334 while (!Worklist.empty()) {
2335 MachineBasicBlock *MBB = OrderToBB[Worklist.top()];
2336 Worklist.pop();
      MBBJoined = join(*MBB, OutLocs, InLocs, VarLocIDs, Visited,
                       ArtificialBlocks);
      MBBJoined |= Visited.insert(MBB).second;
2340 if (MBBJoined) {
2341 MBBJoined = false;
2342 Changed = true;
2343 // Now that we have started to extend ranges across BBs we need to
2344 // examine spill, copy and restore instructions to see whether they
2345 // operate with registers that correspond to user variables.
2346 // First load any pending inlocs.
        OpenRanges.insertFromLocSet(getVarLocsInMBB(MBB, InLocs), VarLocIDs);
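        // Reset per-block state used for entry-value tracking before scanning
        // the block's instructions.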
2348 LastNonDbgMI = nullptr;
2349 RegSetInstrs.clear();
2350 for (auto &MI : *MBB)
2351 process(MI, OpenRanges, VarLocIDs, Transfers, EntryValTransfers,
2352 RegSetInstrs);
        OLChanged |= transferTerminator(MBB, OpenRanges, OutLocs, VarLocIDs);
2354
2355 LLVM_DEBUG(printVarLocInMBB(MF, OutLocs, VarLocIDs,
2356 "OutLocs after propagating", dbgs()));
2357 LLVM_DEBUG(printVarLocInMBB(MF, InLocs, VarLocIDs,
2358 "InLocs after propagating", dbgs()));
2359
2360 if (OLChanged) {
2361 OLChanged = false;
2362 for (auto *s : MBB->successors())
            if (OnPending.insert(s).second) {
              Pending.push(BBToOrder[s]);
2365 }
2366 }
2367 }
2368 }
    Worklist.swap(Pending);
    // At this point, Pending must be empty, since it now holds the
    // already-drained Worklist.
    assert(Pending.empty() && "Pending should be empty");
2373 }
2374
2375 // Add any DBG_VALUE instructions created by location transfers.
2376 for (auto &TR : Transfers) {
2377 assert(!TR.TransferInst->isTerminator() &&
2378 "Cannot insert DBG_VALUE after terminator");
2379 MachineBasicBlock *MBB = TR.TransferInst->getParent();
2380 const VarLoc &VL = VarLocIDs[TR.LocationID];
2381 MachineInstr *MI = VL.BuildDbgValue(MF);
    MBB->insertAfterBundle(TR.TransferInst->getIterator(), MI);
2383 }
2384 Transfers.clear();
2385
2386 // Add DBG_VALUEs created using Backup Entry Value location.
2387 for (auto &TR : EntryValTransfers) {
2388 MachineInstr *TRInst = const_cast<MachineInstr *>(TR.first);
2389 assert(!TRInst->isTerminator() &&
2390 "Cannot insert DBG_VALUE after terminator");
2391 MachineBasicBlock *MBB = TRInst->getParent();
2392 const VarLoc &VL = VarLocIDs[TR.second];
2393 MachineInstr *MI = VL.BuildDbgValue(MF);
    MBB->insertAfterBundle(TRInst->getIterator(), MI);
2395 }
2396 EntryValTransfers.clear();
2397
2398 // Deferred inlocs will not have had any DBG_VALUE insts created; do
2399 // that now.
  flushPendingLocs(InLocs, VarLocIDs);
2401
2402 LLVM_DEBUG(printVarLocInMBB(MF, OutLocs, VarLocIDs, "Final OutLocs", dbgs()));
2403 LLVM_DEBUG(printVarLocInMBB(MF, InLocs, VarLocIDs, "Final InLocs", dbgs()));
2404 return Changed;
2405}
2406
LDVImpl *llvm::makeVarLocBasedLiveDebugValues() {
  return new VarLocBasedLDV();
}
2412