//===- LiveIntervalCalc.cpp - Calculate live interval --------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// Implementation of the LiveIntervalCalc class.
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/LiveIntervalCalc.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/CodeGen/LiveInterval.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/SlotIndexes.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/MC/LaneBitmask.h"
#include <cassert>

using namespace llvm;

#define DEBUG_TYPE "regalloc"

// Reserve an address that indicates a value that is known to be "undef".
static VNInfo UndefVNI(0xbad, SlotIndex());

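// Helper: create a dead def in LR for the instruction that owns MO, placed at
// the instruction's register slot (or at its early-clobber slot when MO is
// marked early-clobber).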
static void createDeadDef(SlotIndexes &Indexes, VNInfo::Allocator &Alloc,
                          LiveRange &LR, const MachineOperand &MO) {
  const MachineInstr &MI = *MO.getParent();
  SlotIndex DefIdx =
      Indexes.getInstructionIndex(MI).getRegSlot(MO.isEarlyClobber());

  // Create the def in LR. This may find an existing def.
  LR.createDeadDef(DefIdx, Alloc);
}

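// Compute liveness for LI from scratch: seed a dead def for every definition
// of LI.reg() (tracking subregister lane masks as subranges when requested),
// then extend the range(s) to every use, building SSA form where needed.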
void LiveIntervalCalc::calculate(LiveInterval &LI, bool TrackSubRegs) {
  const MachineRegisterInfo *MRI = getRegInfo();
  SlotIndexes *Indexes = getIndexes();
  VNInfo::Allocator *Alloc = getVNAlloc();

  assert(MRI && Indexes && "call reset() first");

  // Step 1: Create minimal live segments for every definition of Reg.
  // Visit all def operands. If the same instruction has multiple defs of Reg,
  // createDeadDef() will deduplicate.
  const TargetRegisterInfo &TRI = *MRI->getTargetRegisterInfo();
  Register Reg = LI.reg();
  for (const MachineOperand &MO : MRI->reg_nodbg_operands(Reg)) {
    if (!MO.isDef() && !MO.readsReg())
      continue;

    unsigned SubReg = MO.getSubReg();
    if (LI.hasSubRanges() || (SubReg != 0 && TrackSubRegs)) {
      LaneBitmask SubMask = SubReg != 0 ? TRI.getSubRegIndexLaneMask(SubReg)
                                        : MRI->getMaxLaneMaskForVReg(Reg);
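      // E.g. a def of a hypothetical %0.sub_lo covers only sub_lo's lanes,
      // while a full def of %0 covers every lane of its register class.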
      // If this is the first time we see a subregister def, initialize
      // subranges by creating a copy of the main range.
      if (!LI.hasSubRanges() && !LI.empty()) {
        LaneBitmask ClassMask = MRI->getMaxLaneMaskForVReg(Reg);
        LI.createSubRangeFrom(*Alloc, ClassMask, LI);
      }

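      // refineSubRanges splits the existing subranges so that each one is
      // either covered by SubMask or disjoint from it, and applies the
      // callback to the covered pieces; here the callback seeds a dead def
      // for def operands.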
      LI.refineSubRanges(
          *Alloc, SubMask,
          [&MO, Indexes, Alloc](LiveInterval::SubRange &SR) {
            if (MO.isDef())
              createDeadDef(*Indexes, *Alloc, SR, MO);
          },
          *Indexes, TRI);
    }

    // Create the def in the main live range. We do not have to do this if
    // subranges are tracked, as we recreate the main range later in this case.
    if (MO.isDef() && !LI.hasSubRanges())
      createDeadDef(*Indexes, *Alloc, LI, MO);
  }

  // We may have created empty live ranges for partially undefined uses; we
  // can't keep them because we won't find defs in them later.
  LI.removeEmptySubRanges();

  const MachineFunction *MF = getMachineFunction();
  MachineDominatorTree *DomTree = getDomTree();
  // Step 2: Extend live segments to all uses, constructing SSA form as
  // necessary.
  if (LI.hasSubRanges()) {
    for (LiveInterval::SubRange &S : LI.subranges()) {
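      // A fresh calculator (with its own live-out map) is used for each
      // subrange: live-out values computed for one lane mask are not valid
      // for another.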
      LiveIntervalCalc SubLIC;
      SubLIC.reset(MF, Indexes, DomTree, Alloc);
      SubLIC.extendToUses(S, Reg, S.LaneMask, &LI);
    }
    LI.clear();
    constructMainRangeFromSubranges(LI);
  } else {
    resetLiveOutMap();
    extendToUses(LI, Reg, LaneBitmask::getAll());
  }
}

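// Rebuild the main range of LI as the union of its subranges: seed a dead def
// at every subrange def that is neither unused nor a PHI def, then extend the
// main range to all uses of the register.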
void LiveIntervalCalc::constructMainRangeFromSubranges(LiveInterval &LI) {
  // First create dead defs at all defs found in subranges.
  LiveRange &MainRange = LI;
  assert(MainRange.segments.empty() && MainRange.valnos.empty() &&
         "Expect empty main liverange");

  VNInfo::Allocator *Alloc = getVNAlloc();
  for (const LiveInterval::SubRange &SR : LI.subranges()) {
    for (const VNInfo *VNI : SR.valnos) {
      if (!VNI->isUnused() && !VNI->isPHIDef())
        MainRange.createDeadDef(VNI->def, *Alloc);
    }
  }
  resetLiveOutMap();
  extendToUses(MainRange, LI.reg(), LaneBitmask::getAll(), &LI);
}

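// Seed LR with a dead def for every def operand of Reg; uses are not extended
// here.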
void LiveIntervalCalc::createDeadDefs(LiveRange &LR, Register Reg) {
  const MachineRegisterInfo *MRI = getRegInfo();
  SlotIndexes *Indexes = getIndexes();
  VNInfo::Allocator *Alloc = getVNAlloc();
  assert(MRI && Indexes && "call reset() first");

  // Visit all def operands. If the same instruction has multiple defs of Reg,
  // LR.createDeadDef() will deduplicate.
  for (MachineOperand &MO : MRI->def_operands(Reg))
    createDeadDef(*Indexes, *Alloc, LR, MO);
}

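// Extend LR so it reaches every operand that reads Reg. For subranges, Mask
// restricts the lanes of interest and LI supplies the points where those
// lanes are undef; kill flags on use operands are cleared as a side effect.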
void LiveIntervalCalc::extendToUses(LiveRange &LR, Register Reg,
                                    LaneBitmask Mask, LiveInterval *LI) {
  const MachineRegisterInfo *MRI = getRegInfo();
  SlotIndexes *Indexes = getIndexes();
  SmallVector<SlotIndex, 4> Undefs;
  if (LI != nullptr)
    LI->computeSubRangeUndefs(Undefs, Mask, *MRI, *Indexes);

  // Visit all operands that read Reg. This may include partial defs.
  bool IsSubRange = !Mask.all();
  const TargetRegisterInfo &TRI = *MRI->getTargetRegisterInfo();
  for (MachineOperand &MO : MRI->reg_nodbg_operands(Reg)) {
    // Clear all kill flags. They will be reinserted after register allocation
    // by LiveIntervals::addKillFlags().
    if (MO.isUse())
      MO.setIsKill(false);
    // MO::readsReg returns "true" for subregister defs. This is for keeping
    // liveness of the entire register (i.e. for the main range of the live
    // interval). For subranges, definitions of non-overlapping subregisters
    // do not count as uses.
    if (!MO.readsReg() || (IsSubRange && MO.isDef()))
      continue;

    unsigned SubReg = MO.getSubReg();
    if (SubReg != 0) {
      LaneBitmask SLM = TRI.getSubRegIndexLaneMask(SubReg);
      if (MO.isDef())
        SLM = ~SLM;
      // Ignore uses not reading the current (sub)range.
      if ((SLM & Mask).none())
        continue;
    }

    // Determine the actual place of the use.
    const MachineInstr *MI = MO.getParent();
    unsigned OpNo = (&MO - &MI->getOperand(0));
    SlotIndex UseIdx;
    if (MI->isPHI()) {
      assert(!MO.isDef() && "Cannot handle PHI def of partial register.");
      // The actual place where a phi operand is used is the end of the pred
      // MBB. PHI operands are paired: (Reg, PredMBB).
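      // For example (a sketch, not taken from real input):
      //   %2:gr32 = PHI %0, %bb.1, %1, %bb.2
      // treats the use of %0 as happening at the end of %bb.1, so the paired
      // block operand is at OpNo + 1.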
      UseIdx = Indexes->getMBBEndIdx(MI->getOperand(OpNo + 1).getMBB());
    } else {
      // Check for early-clobber redefs.
      bool isEarlyClobber = false;
      unsigned DefIdx;
      if (MO.isDef())
        isEarlyClobber = MO.isEarlyClobber();
      else if (MI->isRegTiedToDefOperand(OpNo, &DefIdx)) {
        // FIXME: This would be a lot easier if tied early-clobber uses also
        // had an early-clobber flag.
        isEarlyClobber = MI->getOperand(DefIdx).isEarlyClobber();
      }
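      // The early-clobber slot precedes the normal register slot; using it
      // here keeps the extended segment consistent with where an
      // early-clobber (re)def of the register is placed.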
      UseIdx = Indexes->getInstructionIndex(*MI).getRegSlot(isEarlyClobber);
    }

    // MI is reading Reg. We may have visited MI before if it happens to be
    // reading Reg multiple times. That is OK; extend() is idempotent.
    extend(LR, UseIdx, Reg, Undefs);
  }
}