// Only mark it clobbered if this is a use&def operand.
ReusedOperands.markClobbered(PhysReg);
++NumReused;
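// Reuse "slots" above MAX_STACK_SLOT actually encode re-materialized
// values (printed as RM# in the debug output below); only a real stack
// slot can have a store worth deleting.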
if (MI.getOperand(i).isKill() &&
ReuseSlot <= VirtRegMap::MAX_STACK_SLOT) {
// The store of this spilled value is potentially dead, but we
// won't know for certain until we've confirmed that the re-use
// above is valid, which means waiting until the other operands
// are processed. For now we just track the spill slot, we'll
// remove it after the other operands are processed if valid.
PotentialDeadStoreSlots.push_back(ReuseSlot);
}
// Mark it isKill if there are no other uses of the same virtual
// register and it's not a two-address operand. IsKill will be
// unset if the reg is reused.
if (!isTied && KilledMIRegs.count(VirtReg) == 0) {
MI.getOperand(i).setIsKill();
KilledMIRegs.insert(VirtReg);
}
continue;
} // CanReuse
// Otherwise we have a situation where we have a two-address instruction
// whose mod/ref operand needs to be reloaded. This reload is already
// available in some register "PhysReg", but if we used PhysReg as the
// operand to our 2-addr instruction, the instruction would modify
// PhysReg. This isn't cool if something later uses PhysReg and expects
// to get its initial value.
//
// To avoid this problem, and to avoid doing a load right after a store,
// we emit a copy from PhysReg into the designated register for this
// operand.
unsigned DesignatedReg = VRM->getPhys(VirtReg);
assert(DesignatedReg && "Must map virtreg to physreg!");
// Note that, if we reused a register for a previous operand, the
// register we want to reload into might not actually be
// available. If this occurs, use the register indicated by the
// reuser.
if (ReusedOperands.hasReuses())
DesignatedReg = ReusedOperands.
GetRegForReload(VirtReg, DesignatedReg, &MI, Spills,
MaybeDeadStores, RegKills, KillOps, *VRM);
// If the mapped designated register is actually the physreg we have
// incoming, we don't need to insert a dead copy.
if (DesignatedReg == PhysReg) {
// If this stack slot value is already available, reuse it!
if (ReuseSlot > VirtRegMap::MAX_STACK_SLOT)
DEBUG(dbgs() << "Reusing RM#"
<< ReuseSlot-VirtRegMap::MAX_STACK_SLOT-1);
else
DEBUG(dbgs() << "Reusing SS#" << ReuseSlot);
DEBUG(dbgs() << " from physreg " << TRI->getName(PhysReg)
<< " for vreg" << VirtReg
<< " instead of reloading into same physreg.\n");
unsigned RReg = SubIdx ? TRI->getSubReg(PhysReg, SubIdx) : PhysReg;
MI.getOperand(i).setReg(RReg);
MI.getOperand(i).setSubReg(0);
ReusedOperands.markClobbered(RReg);
++NumReused;
continue;
}
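// The designated register differs from the reused PhysReg, so emit an
// explicit copy below; the two-address def will then clobber
// DesignatedReg instead of the still-live PhysReg.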
const TargetRegisterClass* RC = MRI->getRegClass(VirtReg);
MRI->setPhysRegUsed(DesignatedReg);
ReusedOperands.markClobbered(DesignatedReg);
// Back-schedule reloads and remats.
MachineBasicBlock::iterator InsertLoc =
ComputeReloadLoc(&MI, MBB->begin(), PhysReg, TRI, DoReMat,
SSorRMId, TII, MF);
TII->copyRegToReg(*MBB, InsertLoc, DesignatedReg, PhysReg, RC, RC);
MachineInstr *CopyMI = prior(InsertLoc);
CopyMI->setAsmPrinterFlag(MachineInstr::ReloadReuse);
UpdateKills(*CopyMI, TRI, RegKills, KillOps);
// This invalidates DesignatedReg.
Spills.ClobberPhysReg(DesignatedReg);
Spills.addAvailable(ReuseSlot, DesignatedReg);
unsigned RReg =
SubIdx ? TRI->getSubReg(DesignatedReg, SubIdx) : DesignatedReg;
MI.getOperand(i).setReg(RReg);
MI.getOperand(i).setSubReg(0);
DEBUG(dbgs() << '\t' << *prior(MII));
++NumReused;
continue;
} // if (PhysReg)
// Otherwise, reload it and remember that we have it.
PhysReg = VRM->getPhys(VirtReg);
assert(PhysReg && "Must map virtreg to physreg!");
// Note that, if we reused a register for a previous operand, the
// register we want to reload into might not actually be
// available. If this occurs, use the register indicated by the
// reuser.
if (ReusedOperands.hasReuses())
PhysReg = ReusedOperands.GetRegForReload(VirtReg, PhysReg, &MI,
Spills, MaybeDeadStores, RegKills, KillOps, *VRM);
MRI->setPhysRegUsed(PhysReg);
ReusedOperands.markClobbered(PhysReg);
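// AvoidReload was set earlier when the reload is known to be
// unnecessary (e.g. the use is marked undef); in that case just count
// the avoided reload.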
if (AvoidReload)
++NumAvoided;
else {
// Back-schedule reloads and remats.
MachineBasicBlock::iterator InsertLoc =
ComputeReloadLoc(MII, MBB->begin(), PhysReg, TRI, DoReMat,
SSorRMId, TII, MF);
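// Recompute the value at the insertion point if it is re-materializable;
// otherwise load it back from its assigned stack slot.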
if (DoReMat) {
ReMaterialize(*MBB, InsertLoc, PhysReg, VirtReg, TII, TRI, *VRM);
} else {
const TargetRegisterClass* RC = MRI->getRegClass(VirtReg);
TII->loadRegFromStackSlot(*MBB, InsertLoc, PhysReg, SSorRMId, RC);
MachineInstr *LoadMI = prior(InsertLoc);
VRM->addSpillSlotUse(SSorRMId, LoadMI);
++NumLoads;
DistanceMap.insert(std::make_pair(LoadMI, Dist++));
}
// This invalidates PhysReg.
Spills.ClobberPhysReg(PhysReg);
// Any stores to this stack slot are not dead anymore.
if (!DoReMat)
MaybeDeadStores[SSorRMId] = NULL;
Spills.addAvailable(SSorRMId, PhysReg);
// Assume this is the last use. IsKill will be unset if the reg is
// reused, unless it's a two-address operand.
if (!MI.isRegTiedToDefOperand(i) &&
KilledMIRegs.count(VirtReg) == 0) {
MI.getOperand(i).setIsKill();
KilledMIRegs.insert(VirtReg);
}
UpdateKills(*prior(InsertLoc), TRI, RegKills, KillOps);
DEBUG(dbgs() << '\t' << *prior(InsertLoc));
}
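// Rewrite the operand to name the physical register (or the requested
// sub-register of it).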
unsigned RReg = SubIdx ? TRI->getSubReg(PhysReg, SubIdx) : PhysReg;
MI.getOperand(i).setReg(RReg);
MI.getOperand(i).setSubReg(0);
}
// Ok - now we can remove stores that have been confirmed dead.
for (unsigned j = 0, e = PotentialDeadStoreSlots.size(); j != e; ++j) {
// This was the last use and the spilled value is still available
// for reuse. That means the spill was unnecessary!
int PDSSlot = PotentialDeadStoreSlots[j];
MachineInstr* DeadStore = MaybeDeadStores[PDSSlot];
if (DeadStore) {
DEBUG(dbgs() << "Removed dead store:\t" << *DeadStore);
InvalidateKills(*DeadStore, TRI, RegKills, KillOps);
VRM->RemoveMachineInstrFromMaps(DeadStore);
MBB->erase(DeadStore);
MaybeDeadStores[PDSSlot] = NULL;
++NumDSE;
}
}
DEBUG(dbgs() << '\t' << MI);
// If we have folded references to memory operands, make sure we clear all
// physical registers that may contain the value of the spilled virtual
// register
SmallSet<int, 2> FoldedSS;
for (tie(I, End) = VRM->getFoldedVirts(&MI); I != End; ) {
unsigned VirtReg = I->second.first;
VirtRegMap::ModRef MR = I->second.second;
DEBUG(dbgs() << "Folded vreg: " << VirtReg << " MR: " << MR);
// MI2VirtMap can be updated, which would invalidate the iterator, so
// increment the iterator first.
++I;
int SS = VRM->getStackSlot(VirtReg);
if (SS == VirtRegMap::NO_STACK_SLOT)
continue;
FoldedSS.insert(SS);
DEBUG(dbgs() << " - StackSlot: " << SS << "\n");
// If this folded instruction is just a use, check to see if it's a
// straight load from the virt reg slot.
if ((MR & VirtRegMap::isRef) && !(MR & VirtRegMap::isMod)) {
int FrameIdx;
unsigned DestReg = TII->isLoadFromStackSlot(&MI, FrameIdx);
if (DestReg && FrameIdx == SS) {
// If this spill slot is available, turn it into a copy (or nothing)
// instead of leaving it as a load!
if (unsigned InReg = Spills.getSpillSlotOrReMatPhysReg(SS)) {
DEBUG(dbgs() << "Promoted Load To Copy: " << MI);
if (DestReg != InReg) {
const TargetRegisterClass *RC = MRI->getRegClass(VirtReg);
TII->copyRegToReg(*MBB, &MI, DestReg, InReg, RC, RC);
MachineOperand *DefMO = MI.findRegisterDefOperand(DestReg);
unsigned SubIdx = DefMO->getSubReg();
// Revisit the copy so we make sure to notice the effects of the
// operation on the destreg (either needing to RA it if it's
// virtual or needing to clobber any values if it's physical).
NextMII = &MI;
--NextMII; // backtrack to the copy.
NextMII->setAsmPrinterFlag(MachineInstr::ReloadReuse);
// Propagate the sub-register index over.
if (SubIdx) {
DefMO = NextMII->findRegisterDefOperand(DestReg);
DefMO->setSubReg(SubIdx);
}
// Mark it as killed.
MachineOperand *KillOpnd = NextMII->findRegisterUseOperand(InReg);
KillOpnd->setIsKill();
BackTracked = true;
} else {
DEBUG(dbgs() << "Removing now-noop copy: " << MI);
// Unset last kill since it's being reused.
InvalidateKill(InReg, TRI, RegKills, KillOps);
Spills.disallowClobberPhysReg(InReg);
}
InvalidateKills(MI, TRI, RegKills, KillOps);
VRM->RemoveMachineInstrFromMaps(&MI);
MBB->erase(&MI);
Erased = true;
goto ProcessNextInst;
}
} else {
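// Not a straight load from the slot: if the slot's value is already
// live in a physical register, try to unfold the memory operand so the
// instruction reads the register directly.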
unsigned PhysReg = Spills.getSpillSlotOrReMatPhysReg(SS);
SmallVector<MachineInstr*, 4> NewMIs;
if (PhysReg &&
TII->unfoldMemoryOperand(MF, &MI, PhysReg, false, false, NewMIs)) {
MBB->insert(MII, NewMIs[0]);
InvalidateKills(MI, TRI, RegKills, KillOps);
VRM->RemoveMachineInstrFromMaps(&MI);
MBB->erase(&MI);
Erased = true;
--NextMII; // backtrack to the unfolded instruction.
BackTracked = true;
goto ProcessNextInst;
}
}
}
// If this reference is not a use, any previous store is now dead.
// Otherwise, the store to this stack slot is not dead anymore.
MachineInstr* DeadStore = MaybeDeadStores[SS];
if (DeadStore) {
bool isDead = !(MR & VirtRegMap::isRef);
MachineInstr *NewStore = NULL;
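// Try to split the store half out of the mod/ref access: NewMIs[0]
// performs the operation and NewStore rewrites the slot, which renders
// the earlier store to the same slot dead.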
if (MR & VirtRegMap::isModRef) {
unsigned PhysReg = Spills.getSpillSlotOrReMatPhysReg(SS);
SmallVector<MachineInstr*, 4> NewMIs;
// We can reuse this physreg as long as we are allowed to clobber
// the value and there isn't an earlier def that has already clobbered
// the physreg.
if (PhysReg &&
!ReusedOperands.isClobbered(PhysReg) &&
Spills.canClobberPhysReg(PhysReg) &&
!TII->isStoreToStackSlot(&MI, SS)) { // Not profitable!
MachineOperand *KillOpnd =
DeadStore->findRegisterUseOperand(PhysReg, true);
// Note, if the store is storing a sub-register, it's possible the
// super-register is needed below.
if (KillOpnd && !KillOpnd->getSubReg() &&
TII->unfoldMemoryOperand(MF, &MI, PhysReg, false, true, NewMIs)) {
MBB->insert(MII, NewMIs[0]);
NewStore = NewMIs[1];
MBB->insert(MII, NewStore);
VRM->addSpillSlotUse(SS, NewStore);
InvalidateKills(MI, TRI, RegKills, KillOps);
VRM->RemoveMachineInstrFromMaps(&MI);
MBB->erase(&MI);
Erased = true;
--NextMII;
--NextMII; // backtrack to the unfolded instruction.
BackTracked = true;
isDead = true;
++NumSUnfold;
}
}
}
if (isDead) { // Previous store is dead.
// If we get here, the store is dead, nuke it now.
DEBUG(dbgs() << "Removed dead store:\t" << *DeadStore);
InvalidateKills(*DeadStore, TRI, RegKills, KillOps);
VRM->RemoveMachineInstrFromMaps(DeadStore);
MBB->erase(DeadStore);
if (!NewStore)
++NumDSE;
}
MaybeDeadStores[SS] = NULL;
if (NewStore) {
// Treat this store as a spill merged into a copy. That makes the
// stack slot value available.
VRM->virtFolded(VirtReg, NewStore, VirtRegMap::isMod);
goto ProcessNextInst;
}
}
// If the spill slot value is available, and this is a new definition of
// the value, the value is not available anymore.
if (MR & VirtRegMap::isMod) {
// Notice that the value in this stack slot has been modified.
Spills.ModifyStackSlotOrReMat(SS);
// If this is *just* a mod of the value, check to see if this is just a
// store to the spill slot (i.e. the spill got merged into the copy). If
// so, realize that the vreg is available now, and add the store to the
// MaybeDeadStore info.
int StackSlot;
if (!(MR & VirtRegMap::isRef)) {
if (unsigned SrcReg = TII->isStoreToStackSlot(&MI, StackSlot)) {
assert(TargetRegisterInfo::isPhysicalRegister(SrcReg) &&
"Src hasn't been allocated yet?");
if (CommuteToFoldReload(MII, VirtReg, SrcReg, StackSlot,
Spills, RegKills, KillOps, TRI)) {
NextMII = llvm::next(MII);
BackTracked = true;
goto ProcessNextInst;
}
// Okay, this is certainly a store of SrcReg to [StackSlot]. Mark
// this as a potentially dead store in case there is a subsequent
// store into the stack slot without a read from it.
MaybeDeadStores[StackSlot] = &MI;
// If the stack slot value was previously available in some other
// register, change it now. Otherwise, make the register
// available in PhysReg.
Spills.addAvailable(StackSlot, SrcReg, MI.killsRegister(SrcReg));
}
}
}
}
// Process all of the spilled defs.
for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
MachineOperand &MO = MI.getOperand(i);
if (!(MO.isReg() && MO.getReg() && MO.isDef()))
continue;
unsigned VirtReg = MO.getReg();
if (!TargetRegisterInfo::isVirtualRegister(VirtReg)) {
// Check to see if this is a noop copy. If so, eliminate the
// instruction before considering the dest reg to be changed.
// Also check whether it's copying from an "undef"; if so, we can't
// eliminate the copy or else the undef marker is lost, which would
// confuse the scavenger. This is extremely rare.
unsigned Src, Dst, SrcSR, DstSR;
if (TII->isMoveInstr(MI, Src, Dst, SrcSR, DstSR) && Src == Dst &&
!MI.findRegisterUseOperand(Src)->isUndef()) {
++NumDCE;
DEBUG(dbgs() << "Removing now-noop copy: " << MI);
SmallVector<unsigned, 2> KillRegs;
InvalidateKills(MI, TRI, RegKills, KillOps, &KillRegs);
if (MO.isDead() && !KillRegs.empty()) {
// Source register or an implicit super/sub-register use is killed.
assert(KillRegs[0] == Dst ||
TRI->isSubRegister(KillRegs[0], Dst) ||
TRI->isSuperRegister(KillRegs[0], Dst));
// Last def is now dead.
TransferDeadness(Dist, Src, RegKills, KillOps);
}
VRM->RemoveMachineInstrFromMaps(&MI);
MBB->erase(&MI);
Erased = true;
Spills.disallowClobberPhysReg(VirtReg);
goto ProcessNextInst;
}
// If it's not a no-op copy, it clobbers the value in the destreg.
Spills.ClobberPhysReg(VirtReg);
ReusedOperands.markClobbered(VirtReg);
// Check to see if this instruction is a load from a stack slot into
// a register. If so, this provides the stack slot value in the reg.
int FrameIdx;
if (unsigned DestReg = TII->isLoadFromStackSlot(&MI, FrameIdx)) {
assert(DestReg == VirtReg && "Unknown load situation!");
// If it is a folded reference, then it's not safe to clobber.
bool Folded = FoldedSS.count(FrameIdx);
// Otherwise, if it wasn't available, remember that it is now!
Spills.addAvailable(FrameIdx, DestReg, !Folded);
goto ProcessNextInst;
}
continue;
}
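// This is a spilled virtual-register def: rewrite it to a physical
// register and, unless the def is dead, store the result back to its
// stack slot below.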
unsigned SubIdx = MO.getSubReg();
bool DoReMat = VRM->isReMaterialized(VirtReg);
if (DoReMat)
ReMatDefs.insert(&MI);
// The only vregs left are stack slot definitions.
int StackSlot = VRM->getStackSlot(VirtReg);
const TargetRegisterClass *RC = MRI->getRegClass(VirtReg);
// If this def is part of a two-address operand, make sure to execute
// the store from the correct physical register.
unsigned PhysReg;
unsigned TiedOp;
if (MI.isRegTiedToUseOperand(i, &TiedOp)) {
PhysReg = MI.getOperand(TiedOp).getReg();
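// With a sub-register def, the tied operand names the sub-register;
// find the enclosing super-register so the store writes the full
// register.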
if (SubIdx) {
unsigned SuperReg = findSuperReg(RC, PhysReg, SubIdx, TRI);
assert(SuperReg && TRI->getSubReg(SuperReg, SubIdx) == PhysReg &&
"Can't find corresponding super-register!");
PhysReg = SuperReg;
}
} else {
PhysReg = VRM->getPhys(VirtReg);
if (ReusedOperands.isClobbered(PhysReg)) {
// Another def has taken the assigned physreg. It must have been a
// use&def which got it due to reuse. Undo the reuse!
PhysReg = ReusedOperands.GetRegForReload(VirtReg, PhysReg, &MI,
Spills, MaybeDeadStores, RegKills, KillOps, *VRM);
}
}
assert(PhysReg && "VR not assigned a physical register?");
MRI->setPhysRegUsed(PhysReg);
unsigned RReg = SubIdx ? TRI->getSubReg(PhysReg, SubIdx) : PhysReg;
ReusedOperands.markClobbered(RReg);
MI.getOperand(i).setReg(RReg);
MI.getOperand(i).setSubReg(0);
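// A dead def never needs to reach memory; only live values are spilled
// back to the stack slot.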
if (!MO.isDead()) {
MachineInstr *&LastStore = MaybeDeadStores[StackSlot];
SpillRegToStackSlot(MII, -1, PhysReg, StackSlot, RC, true,
LastStore, Spills, ReMatDefs, RegKills, KillOps);
NextMII = llvm::next(MII);
// Check to see if this is a noop copy. If so, eliminate the
// instruction before considering the dest reg to be changed.
{
unsigned Src, Dst, SrcSR, DstSR;
if (TII->isMoveInstr(MI, Src, Dst, SrcSR, DstSR) && Src == Dst) {
++NumDCE;
DEBUG(dbgs() << "Removing now-noop copy: " << MI);
InvalidateKills(MI, TRI, RegKills, KillOps);
VRM->RemoveMachineInstrFromMaps(&MI);
MBB->erase(&MI);
Erased = true;
UpdateKills(*LastStore, TRI, RegKills, KillOps);
goto ProcessNextInst;
}
}
}
ProcessNextInst:
// Delete dead instructions without side effects.
if (!Erased && !BackTracked && isSafeToDelete(MI)) {
InvalidateKills(MI, TRI, RegKills, KillOps);
VRM->RemoveMachineInstrFromMaps(&MI);
MBB->erase(&MI);
Erased = true;
}
if (!Erased)
DistanceMap.insert(std::make_pair(&MI, Dist++));
if (!Erased && !BackTracked) {
for (MachineBasicBlock::iterator II = &MI; II != NextMII; ++II)
UpdateKills(*II, TRI, RegKills, KillOps);
}
MII = NextMII;
}
llvm::VirtRegRewriter* llvm::createVirtRegRewriter() {
switch (RewriterOpt) {
default: llvm_unreachable("Unreachable!");
case local:
return new LocalRewriter();
case trivial:
return new TrivialRewriter();
}
}