//===-- LiveRangeEdit.cpp - Basic tools for editing a register live range -===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// The LiveRangeEdit class represents changes done to a virtual register when it
// is spilled or split.
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/LiveRangeEdit.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/CodeGen/CalcSpillWeights.h"
#include "llvm/CodeGen/LiveIntervals.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/VirtRegMap.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

#define DEBUG_TYPE "regalloc"

STATISTIC(NumDCEDeleted, "Number of instructions deleted by DCE");
STATISTIC(NumDCEFoldedLoads, "Number of single use loads folded after DCE");
STATISTIC(NumFracRanges, "Number of live ranges fractured by DCE");
STATISTIC(NumReMaterialization, "Number of instructions rematerialized");

void LiveRangeEdit::Delegate::anchor() { }

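/// Create a new, empty live interval for a clone of OldReg. The clone records
/// its split origin in the VirtRegMap (if any), inherits the parent's
/// not-spillable flag, and optionally gets empty subranges mirroring OldReg's
/// lane masks.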
LiveInterval &LiveRangeEdit::createEmptyIntervalFrom(Register OldReg,
                                                     bool createSubRanges) {
  Register VReg = MRI.cloneVirtualRegister(OldReg);
  if (VRM)
    VRM->setIsSplitFromReg(VReg, VRM->getOriginal(OldReg));

  LiveInterval &LI = LIS.createEmptyInterval(VReg);
  if (Parent && !Parent->isSpillable())
    LI.markNotSpillable();
  if (createSubRanges) {
    // Create empty subranges if the OldReg's interval has them. Do not create
    // the main range here---it will be constructed later after the subranges
    // have been finalized.
    LiveInterval &OldLI = LIS.getInterval(OldReg);
    VNInfo::Allocator &Alloc = LIS.getVNInfoAllocator();
    for (LiveInterval::SubRange &S : OldLI.subranges())
      LI.createSubRange(Alloc, S.LaneMask);
  }
  return LI;
}

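/// Create a new virtual register cloned from OldReg and record it as a split
/// product in the VirtRegMap (if any). See the FIXME below about interval
/// computation when propagating the not-spillable flag.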
Register LiveRangeEdit::createFrom(Register OldReg) {
  Register VReg = MRI.cloneVirtualRegister(OldReg);
  if (VRM) {
    VRM->setIsSplitFromReg(VReg, VRM->getOriginal(OldReg));
  }
  // FIXME: Getting the interval here actually computes it.
  // In theory, this may not be what we want, but in practice
  // the createEmptyIntervalFrom API is used when this is not
  // the case. Generally speaking we just want to annotate the
  // LiveInterval when it gets created but we cannot do that at
  // the moment.
  if (Parent && !Parent->isSpillable())
    LIS.getInterval(VReg).markNotSpillable();
  return VReg;
}

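/// Return true if the instruction defining RM's value can be rematerialized
/// at UseIdx: the target must mark it rematerializable, and all of its
/// register uses must still carry the same values at UseIdx.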
bool LiveRangeEdit::canRematerializeAt(Remat &RM, SlotIndex UseIdx) {
  assert(RM.OrigMI && "No defining instruction for remattable value");

  if (!TII.isReMaterializable(*RM.OrigMI))
    return false;

  // Verify that all used registers are available with the same values.
  if (!VirtRegAuxInfo::allUsesAvailableAt(RM.OrigMI, UseIdx, LIS, MRI, TII))
    return false;

  return true;
}

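/// Rematerialize RM.OrigMI into MBB before MI, defining DestReg (optionally
/// through SubIdx), record the remat, and return the SlotIndex of the new
/// def. If ReplaceIndexMI is given, the clone takes over its slot index.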
SlotIndex LiveRangeEdit::rematerializeAt(MachineBasicBlock &MBB,
                                         MachineBasicBlock::iterator MI,
                                         Register DestReg, const Remat &RM,
                                         const TargetRegisterInfo &tri,
                                         bool Late, unsigned SubIdx,
                                         MachineInstr *ReplaceIndexMI) {
  assert(RM.OrigMI && "Invalid remat");
  TII.reMaterialize(MBB, MI, DestReg, SubIdx, *RM.OrigMI);
  // DestReg of the cloned instruction cannot be Dead. Set isDead of DestReg
  // to false anyway in case the isDead flag of RM.OrigMI's dest register
  // is true.
  (*--MI).clearRegisterDeads(DestReg);
  Rematted.insert(RM.ParentVNI);
  ++NumReMaterialization;

  bool EarlyClobber = MI->getOperand(0).isEarlyClobber();
  if (ReplaceIndexMI)
    return LIS.ReplaceMachineInstrInMaps(*ReplaceIndexMI, *MI)
        .getRegSlot(EarlyClobber);
  return LIS.getSlotIndexes()->insertMachineInstrInMaps(*MI, Late).getRegSlot(
      EarlyClobber);
}

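/// Remove Reg's live interval if the delegate agrees that the register can be
/// erased.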
void LiveRangeEdit::eraseVirtReg(Register Reg) {
  if (TheDelegate && TheDelegate->LRE_CanEraseVirtReg(Reg))
    LIS.removeInterval(Reg);
}

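/// If LI has a single def that can be folded as a load into its single use,
/// fold it, add the now-dead def to Dead, and return true.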
bool LiveRangeEdit::foldAsLoad(LiveInterval *LI,
                               SmallVectorImpl<MachineInstr*> &Dead) {
  MachineInstr *DefMI = nullptr, *UseMI = nullptr;

  // Check that there is a single def and a single use.
  for (MachineOperand &MO : MRI.reg_nodbg_operands(LI->reg())) {
    MachineInstr *MI = MO.getParent();
    if (MO.isDef()) {
      if (DefMI && DefMI != MI)
        return false;
      if (!MI->canFoldAsLoad())
        return false;
      DefMI = MI;
    } else if (!MO.isUndef()) {
      if (UseMI && UseMI != MI)
        return false;
      // FIXME: Targets don't know how to fold subreg uses.
      if (MO.getSubReg())
        return false;
      UseMI = MI;
    }
  }
  if (!DefMI || !UseMI)
    return false;

  // Since we're moving the DefMI load, make sure we're not extending any live
  // ranges.
  if (!VirtRegAuxInfo::allUsesAvailableAt(
          DefMI, LIS.getInstructionIndex(*UseMI), LIS, MRI, TII))
    return false;

  // We also need to make sure it is safe to move the load.
  // Assume there are stores between DefMI and UseMI.
  bool SawStore = true;
  if (!DefMI->isSafeToMove(SawStore))
    return false;

  LLVM_DEBUG(dbgs() << "Try to fold single def: " << *DefMI
                    << " into single use: " << *UseMI);

  SmallVector<unsigned, 8> Ops;
  if (UseMI->readsWritesVirtualRegister(LI->reg(), &Ops).second)
    return false;

  MachineInstr *FoldMI = TII.foldMemoryOperand(*UseMI, Ops, *DefMI, &LIS);
  if (!FoldMI)
    return false;
  LLVM_DEBUG(dbgs() << " folded: " << *FoldMI);
  LIS.ReplaceMachineInstrInMaps(*UseMI, *FoldMI);
  // Update the call info.
  if (UseMI->shouldUpdateAdditionalCallInfo())
    UseMI->getMF()->moveAdditionalCallInfo(UseMI, FoldMI);
  UseMI->eraseFromParent();
  DefMI->addRegisterDead(LI->reg(), nullptr);
  Dead.push_back(DefMI);
  ++NumDCEFoldedLoads;
  return true;
}

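/// Return true if MO's use of LI is a kill at that instruction, checking the
/// main range and any subranges covered by MO's subregister lanes.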
bool LiveRangeEdit::useIsKill(const LiveInterval &LI,
                              const MachineOperand &MO) const {
  const MachineInstr &MI = *MO.getParent();
  SlotIndex Idx = LIS.getInstructionIndex(MI).getRegSlot();
  if (LI.Query(Idx).isKill())
    return true;
  const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
  unsigned SubReg = MO.getSubReg();
  LaneBitmask LaneMask = TRI.getSubRegIndexLaneMask(SubReg);
  for (const LiveInterval::SubRange &S : LI.subranges()) {
    if ((S.LaneMask & LaneMask).any() && S.Query(Idx).isKill())
      return true;
  }
  return false;
}

/// Find all live intervals that need to shrink, then remove the instruction.
void LiveRangeEdit::eliminateDeadDef(MachineInstr *MI, ToShrinkSet &ToShrink) {
  assert(MI->allDefsAreDead() && "Def isn't really dead");
  SlotIndex Idx = LIS.getInstructionIndex(*MI).getRegSlot();

  // Never delete a bundled instruction.
  if (MI->isBundled()) {
    // TODO: Handle deleting copy bundles
    LLVM_DEBUG(dbgs() << "Won't delete dead bundled inst: " << Idx << '\t'
                      << *MI);
    return;
  }

  // Never delete inline asm.
  if (MI->isInlineAsm()) {
    LLVM_DEBUG(dbgs() << "Won't delete: " << Idx << '\t' << *MI);
    return;
  }

  // Use the same criteria as DeadMachineInstructionElim.
  bool SawStore = false;
  if (!MI->isSafeToMove(SawStore)) {
    LLVM_DEBUG(dbgs() << "Can't delete: " << Idx << '\t' << *MI);
    return;
  }

  LLVM_DEBUG(dbgs() << "Deleting dead def " << Idx << '\t' << *MI);

  // Collect virtual registers to be erased after MI is gone.
  SmallVector<Register, 8> RegsToErase;
  bool ReadsPhysRegs = false;
  bool isOrigDef = false;
  Register Dest;
  unsigned DestSubReg;
  // Only optimize rematerialize case when the instruction has one def, since
  // otherwise we could leave some dead defs in the code. This case is
  // extremely rare.
  if (VRM && MI->getOperand(0).isReg() && MI->getOperand(0).isDef() &&
      MI->getDesc().getNumDefs() == 1) {
    Dest = MI->getOperand(0).getReg();
    DestSubReg = MI->getOperand(0).getSubReg();
    Register Original = VRM->getOriginal(Dest);
    LiveInterval &OrigLI = LIS.getInterval(Original);
    VNInfo *OrigVNI = OrigLI.getVNInfoAt(Idx);
    // The original live-range may have been shrunk to
    // an empty live-range. It happens when it is dead, but
    // we still keep it around to be able to rematerialize
    // other values that depend on it.
    if (OrigVNI)
      isOrigDef = SlotIndex::isSameInstr(OrigVNI->def, Idx);
  }

  bool HasLiveVRegUses = false;

  // Check for live intervals that may shrink
  for (const MachineOperand &MO : MI->operands()) {
    if (!MO.isReg())
      continue;
    Register Reg = MO.getReg();
    if (!Reg.isVirtual()) {
      // Check if MI reads any unreserved physregs.
      if (Reg && MO.readsReg() && !MRI.isReserved(Reg))
        ReadsPhysRegs = true;
      else if (MO.isDef())
        LIS.removePhysRegDefAt(Reg.asMCReg(), Idx);
      continue;
    }
    LiveInterval &LI = LIS.getInterval(Reg);

    // Shrink read registers, unless it is likely to be expensive and
    // unlikely to change anything. We typically don't want to shrink the
    // PIC base register that has lots of uses everywhere.
    // Always shrink COPY uses that probably come from live range splitting.
    if ((MI->readsVirtualRegister(Reg) &&
         (MO.isDef() || TII.isCopyInstr(*MI))) ||
        (MO.readsReg() && (MRI.hasOneNonDBGUse(Reg) || useIsKill(LI, MO))))
      ToShrink.insert(&LI);
    else if (MO.readsReg())
      HasLiveVRegUses = true;

    // Remove defined value.
    if (MO.isDef()) {
      if (TheDelegate && LI.getVNInfoAt(Idx) != nullptr)
        TheDelegate->LRE_WillShrinkVirtReg(LI.reg());
      LIS.removeVRegDefAt(LI, Idx);
      if (LI.empty())
        RegsToErase.push_back(Reg);
    }
  }

  // If the dest of MI is an original reg and MI is reMaterializable,
  // don't delete the inst. Replace the dest with a new reg, and keep
  // the inst for remat of other siblings. The inst is saved in
  // LiveRangeEdit::DeadRemats and will be deleted after all the
  // allocations of the func are done. Note that if we keep the
  // instruction with the original operands, that handles the physreg
  // operand case (described just below) as well.
  // However, immediately delete instructions which have unshrunk virtual
  // register uses. That may provoke RA to split an interval at the KILL
  // and later result in an invalid live segment end.
  if (isOrigDef && DeadRemats && !HasLiveVRegUses &&
      TII.isReMaterializable(*MI)) {
    LiveInterval &NewLI = createEmptyIntervalFrom(Dest, false);
    VNInfo::Allocator &Alloc = LIS.getVNInfoAllocator();
    VNInfo *VNI = NewLI.getNextValue(Idx, Alloc);
    NewLI.addSegment(LiveInterval::Segment(Idx, Idx.getDeadSlot(), VNI));

    if (DestSubReg) {
      const TargetRegisterInfo *TRI = MRI.getTargetRegisterInfo();
      auto *SR =
          NewLI.createSubRange(Alloc, TRI->getSubRegIndexLaneMask(DestSubReg));
      SR->addSegment(LiveInterval::Segment(Idx, Idx.getDeadSlot(),
                                           SR->getNextValue(Idx, Alloc)));
    }

    pop_back();
    DeadRemats->insert(MI);
    const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
    MI->substituteRegister(Dest, NewLI.reg(), 0, TRI);
    assert(MI->registerDefIsDead(NewLI.reg(), &TRI));
  }
  // Currently, we don't support DCE of physreg live ranges. If MI reads
  // any unreserved physregs, don't erase the instruction, but turn it into
  // a KILL instead. This way, the physreg live ranges don't end up
  // dangling.
  // FIXME: It would be better to have something like shrinkToUses() for
  // physregs. That could potentially enable more DCE and it would free up
  // the physreg. It would not happen often, though.
  else if (ReadsPhysRegs) {
    MI->setDesc(TII.get(TargetOpcode::KILL));
    // Remove all operands that aren't physregs.
    for (unsigned i = MI->getNumOperands(); i; --i) {
      const MachineOperand &MO = MI->getOperand(i-1);
      if (MO.isReg() && MO.getReg().isPhysical())
        continue;
      MI->removeOperand(i-1);
    }
    MI->dropMemRefs(*MI->getMF());
    LLVM_DEBUG(dbgs() << "Converted physregs to:\t" << *MI);
  } else {
    if (TheDelegate)
      TheDelegate->LRE_WillEraseInstruction(MI);
    LIS.RemoveMachineInstrFromMaps(*MI);
    MI->eraseFromParent();
    ++NumDCEDeleted;
  }

  // Erase any virtregs that are now empty and unused. There may be <undef>
  // uses around. Keep the empty live range in that case.
  for (Register Reg : RegsToErase) {
    if (LIS.hasInterval(Reg) && MRI.reg_nodbg_empty(Reg)) {
      ToShrink.remove(&LIS.getInterval(Reg));
      eraseVirtReg(Reg);
    }
  }
}

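/// Erase all the instructions in Dead, then repeatedly shrink the live
/// intervals they touched, deleting any newly dead defs until none remain.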
void LiveRangeEdit::eliminateDeadDefs(SmallVectorImpl<MachineInstr *> &Dead,
                                      ArrayRef<Register> RegsBeingSpilled) {
  ToShrinkSet ToShrink;

  for (;;) {
    // Erase all dead defs.
    while (!Dead.empty())
      eliminateDeadDef(Dead.pop_back_val(), ToShrink);

    if (ToShrink.empty())
      break;

    // Shrink just one live interval. Then delete new dead defs.
    LiveInterval *LI = ToShrink.pop_back_val();
    if (foldAsLoad(LI, Dead))
      continue;
    Register VReg = LI->reg();
    if (TheDelegate)
      TheDelegate->LRE_WillShrinkVirtReg(VReg);
    if (!LIS.shrinkToUses(LI, &Dead))
      continue;

    // Don't create new intervals for a register being spilled.
    // The new intervals would have to be spilled anyway so it's not worth it.
    // Also they currently aren't spilled so creating them and not spilling
    // them results in incorrect code.
    if (llvm::is_contained(RegsBeingSpilled, VReg))
      continue;

    // LI may have been separated, create new intervals.
    LI->RenumberValues();
    SmallVector<LiveInterval*, 8> SplitLIs;
    LIS.splitSeparateComponents(*LI, SplitLIs);
    if (!SplitLIs.empty())
      ++NumFracRanges;

    Register Original = VRM ? VRM->getOriginal(VReg) : Register();
    for (const LiveInterval *SplitLI : SplitLIs) {
      // If LI is an original interval that hasn't been split yet, make the new
      // intervals their own originals instead of referring to LI. The original
      // interval must contain all the split products, and LI doesn't.
      if (Original != VReg && Original != 0)
        VRM->setIsSplitFromReg(SplitLI->reg(), Original);
      if (TheDelegate)
        TheDelegate->LRE_DidCloneVirtReg(SplitLI->reg(), VReg);
    }
  }
}

// Keep track of new virtual registers created via
// MachineRegisterInfo::createVirtualRegister.
void LiveRangeEdit::MRI_NoteNewVirtualRegister(Register VReg) {
  if (VRM)
    VRM->grow();

  NewRegs.push_back(VReg);
}

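/// Recompute the register class of each new virtual register, then
/// recalculate its spill weight and allocation hint.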
void LiveRangeEdit::calculateRegClassAndHint(MachineFunction &MF,
                                             VirtRegAuxInfo &VRAI) {
  for (unsigned I = 0, Size = size(); I < Size; ++I) {
    LiveInterval &LI = LIS.getInterval(get(I));
    if (MRI.recomputeRegClass(LI.reg()))
      LLVM_DEBUG({
        const TargetRegisterInfo *TRI = MF.getSubtarget().getRegisterInfo();
        dbgs() << "Inflated " << printReg(LI.reg()) << " to "
               << TRI->getRegClassName(MRI.getRegClass(LI.reg())) << '\n';
      });
    VRAI.calculateSpillWeightAndHint(LI);
  }
}