      // If this folded instruction is just a use, check to see if it's a
      // straight load from the virt reg slot.
      if ((MR & VirtRegMap::isRef) && !(MR & VirtRegMap::isMod)) {
        int FrameIdx;
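
        // (Reminder, assuming VirtRegMap's modref semantics: MR is the
        // modref mask recorded when the memory operand was folded. isRef
        // without isMod means the folded access only reads the spill slot,
        // so it may be a plain reload we can simplify below.)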
        if (unsigned DestReg = TII->isLoadFromStackSlot(&MI, FrameIdx)) {
          if (FrameIdx == SS) {
            // If this spill slot is available, turn it into a copy (or
            // nothing) instead of leaving it as a load!
            if (unsigned InReg = Spills.getSpillSlotOrReMatPhysReg(SS)) {
              DOUT << "Promoted Load To Copy: " << MI;
              if (DestReg != InReg) {
                MRI->copyRegToReg(MBB, &MI, DestReg, InReg,
                                  MF.getSSARegMap()->getRegClass(VirtReg));
                // Revisit the copy so we make sure to notice the effects of
                // the operation on the destreg (either needing to RA it if
                // it's virtual or needing to clobber any values if it's
                // physical).
                NextMII = &MI;
                --NextMII;  // backtrack to the copy.
                BackTracked = true;
              } else
                DOUT << "Removing now-noop copy: " << MI;
              VRM.RemoveFromFoldedVirtMap(&MI);
              MBB.erase(&MI);
              Erased = true;
              goto ProcessNextInst;
            }
          }
        }
      }
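
      // Illustration of the transform above (hypothetical x86 operands, not
      // taken from real pass output): if %EAX is already known to hold the
      // value of stack slot SS, the reload
      //   %EBX = MOV32rm <fi#SS>
      // becomes "%EBX = MOV32rr %EAX", or disappears entirely when the
      // destination and source registers coincide.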

      // If this reference is not a use, any previous store is now dead.
      // Otherwise, the store to this stack slot is not dead anymore.
      MachineInstr* DeadStore = MaybeDeadStores[SS];
      if (DeadStore) {
        if (!(MR & VirtRegMap::isRef)) { // Previous store is dead.
          // If we get here, the store is dead; nuke it now.
          assert((MR & VirtRegMap::isMod) && "Can't be modref!");
          DOUT << "Removed dead store:\t" << *DeadStore;
          InvalidateKills(*DeadStore, RegKills, KillOps);
          MBB.erase(DeadStore);
          VRM.RemoveFromFoldedVirtMap(DeadStore);
          ++NumDSE;
        }
        MaybeDeadStores[SS] = NULL;
      }
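
      // Example of the dead-store case (illustrative x86): with no read of
      // <fi#SS> in between,
      //   MOV32mr <fi#SS>, %EAX   ; recorded in MaybeDeadStores[SS]
      //   MOV32mr <fi#SS>, %ECX   ; overwrites the slot
      // the first store is erased here when the second one is processed.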

      // If the spill slot value is available, and this is a new definition
      // of the value, the value is not available anymore.
      if (MR & VirtRegMap::isMod) {
        // Notice that the value in this stack slot has been modified.
        Spills.ModifyStackSlotOrReMat(SS);

        // If this is *just* a mod of the value, check to see if this is
        // just a store to the spill slot (i.e. the spill got merged into
        // the copy). If so, realize that the vreg is available now, and add
        // the store to the MaybeDeadStore info.
        int StackSlot;
        if (!(MR & VirtRegMap::isRef)) {
          if (unsigned SrcReg = TII->isStoreToStackSlot(&MI, StackSlot)) {
            assert(MRegisterInfo::isPhysicalRegister(SrcReg) &&
                   "Src hasn't been allocated yet?");
            // Okay, this is certainly a store of SrcReg to [StackSlot].
            // Mark this as a potentially dead store in case there is a
            // subsequent store into the stack slot without a read from it.
            MaybeDeadStores[StackSlot] = &MI;

            // If the stack slot value was previously available in some
            // other register, change it now. Otherwise, make the register
            // available in SrcReg.
            Spills.addAvailable(StackSlot, &MI, SrcReg, false/*don't clobber*/);
          }
        }
      }
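
      // Example (illustrative x86): if the spill was folded so that MI is
      // effectively "MOV32mr <fi#SS2>, %EDX", the slot now holds %EDX's
      // value; %EDX is recorded as available for later reloads of SS2, and
      // the store itself becomes a candidate for dead-store elimination.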
    }

    // Process all of the spilled defs.
    for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
      MachineOperand &MO = MI.getOperand(i);
      if (MO.isRegister() && MO.getReg() && MO.isDef()) {
        unsigned VirtReg = MO.getReg();

        if (!MRegisterInfo::isVirtualRegister(VirtReg)) {
          // Check to see if this is a noop copy. If so, eliminate the
          // instruction before considering the dest reg to be changed.
          unsigned Src, Dst;
          if (TII->isMoveInstr(MI, Src, Dst) && Src == Dst) {
            ++NumDCE;
            DOUT << "Removing now-noop copy: " << MI;
            MBB.erase(&MI);
            Erased = true;
            VRM.RemoveFromFoldedVirtMap(&MI);
            goto ProcessNextInst;
          }
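
          // A "noop copy" is a move whose source and destination ended up
          // as the same register after rewriting, e.g. (illustrative)
          // "%EAX = MOV32rr %EAX"; it computes nothing and is deleted here.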

          // If it's not a no-op copy, it clobbers the value in the destreg.
          Spills.ClobberPhysReg(VirtReg);
          ReusedOperands.markClobbered(VirtReg);

          // Check to see if this instruction is a load from a stack slot
          // into a register. If so, this provides the stack slot value in
          // the reg.
          int FrameIdx;
          if (unsigned DestReg = TII->isLoadFromStackSlot(&MI, FrameIdx)) {
            assert(DestReg == VirtReg && "Unknown load situation!");

            // If it is a folded reference, then it's not safe to clobber.
            bool Folded = FoldedSS.count(FrameIdx);
            // Otherwise, if it wasn't available, remember that it is now!
            Spills.addAvailable(FrameIdx, &MI, DestReg, !Folded);
            goto ProcessNextInst;
          }
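
          // E.g. (illustrative) after "%EAX = MOV32rm <fi#3>", slot #3's
          // value is known to be in %EAX; it is only safe to clobber that
          // cached copy if the reference wasn't folded.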

          continue;
        }

        bool DoReMat = VRM.isReMaterialized(VirtReg);
        if (DoReMat)
          ReMatDefs.insert(&MI);

        // The only vregs left are stack slot definitions.
        int StackSlot = VRM.getStackSlot(VirtReg);
        const TargetRegisterClass *RC = MF.getSSARegMap()->getRegClass(VirtReg);
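
        // Rematerializable values are recomputed rather than reloaded, so
        // their defining instructions are remembered in ReMatDefs; if the
        // spill store below later turns out to be dead, the remat def can
        // be deleted along with it (see the NumDRM counting below).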

        // If this def is part of a two-address operand, make sure to
        // execute the store from the correct physical register.
        unsigned PhysReg;
        int TiedOp = MI.getInstrDescriptor()->findTiedToSrcOperand(i);
        if (TiedOp != -1)
          PhysReg = MI.getOperand(TiedOp).getReg();
        else {
          PhysReg = VRM.getPhys(VirtReg);
          if (ReusedOperands.isClobbered(PhysReg)) {
            // Another def has taken the assigned physreg. It must have been
            // a use&def which got it due to reuse. Undo the reuse!
            PhysReg = ReusedOperands.GetRegForReload(PhysReg, &MI, Spills,
                                     MaybeDeadStores, RegKills, KillOps, VRM);
          }
        }

        ReusedOperands.markClobbered(PhysReg);
        MI.getOperand(i).setReg(PhysReg);
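
        // Example (hypothetical two-address instruction): in x86's
        // "%EAX = ADD32rr %EAX, %ECX" the def is tied to the first source
        // operand, so the spill store must use whatever physreg that tied
        // use was rewritten to rather than asking VRM for a fresh one.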

        if (!MO.isDead()) {
          MRI->storeRegToStackSlot(MBB, next(MII), PhysReg, StackSlot, RC);
          DOUT << "Store:\t" << *next(MII);

          // If there is a dead store to this stack slot, nuke it now.
          MachineInstr *&LastStore = MaybeDeadStores[StackSlot];
          if (LastStore) {
            DOUT << "Removed dead store:\t" << *LastStore;
            ++NumDSE;
            SmallVector<unsigned, 1> KillRegs;
            InvalidateKills(*LastStore, RegKills, KillOps, &KillRegs);
            MachineBasicBlock::iterator PrevMII = LastStore;
            bool CheckDef = PrevMII != MBB.begin();
            if (CheckDef)
              --PrevMII;
            MBB.erase(LastStore);
            VRM.RemoveFromFoldedVirtMap(LastStore);
            if (CheckDef) {
              // Look at defs of killed registers on the store. Mark the
              // defs as dead since the store has been deleted and they
              // aren't being reused.
              for (unsigned j = 0, ee = KillRegs.size(); j != ee; ++j) {
                bool HasOtherDef = false;
                if (InvalidateRegDef(PrevMII, MI, KillRegs[j], HasOtherDef)) {
                  MachineInstr *DeadDef = PrevMII;
                  if (ReMatDefs.count(DeadDef) && !HasOtherDef) {
                    // FIXME: This assumes a remat def does not have side
                    // effects.
                    MBB.erase(DeadDef);
                    VRM.RemoveFromFoldedVirtMap(DeadDef);
                    ++NumDRM;
                  }
                }
              }
            }
          }

          LastStore = next(MII);
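
          // Note: if the erased store was the only user of a value that a
          // rematerializable def produced solely for it, the loop above
          // deletes that def too (the FIXME there records that this assumes
          // remat defs have no side effects).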

          // If the stack slot value was previously available in some other
          // register, change it now. Otherwise, make the register available
          // in PhysReg.
          Spills.ModifyStackSlotOrReMat(StackSlot);
          Spills.ClobberPhysReg(PhysReg);
          Spills.addAvailable(StackSlot, LastStore, PhysReg);
          ++NumStores;
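
          // From here on, later reloads of StackSlot within this block can
          // reuse PhysReg directly instead of reloading from the frame.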

          // Check to see if this is a noop copy. If so, eliminate the
          // instruction before considering the dest reg to be changed.
          {
            unsigned Src, Dst;
            if (TII->isMoveInstr(MI, Src, Dst) && Src == Dst) {
              ++NumDCE;
              DOUT << "Removing now-noop copy: " << MI;
              MBB.erase(&MI);
              Erased = true;
              VRM.RemoveFromFoldedVirtMap(&MI);
              UpdateKills(*LastStore, RegKills, KillOps);
              goto ProcessNextInst;
            }
          }
        }
      }
    }
  ProcessNextInst:
    if (!Erased && !BackTracked)
      for (MachineBasicBlock::iterator II = MI; II != NextMII; ++II)
        UpdateKills(*II, RegKills, KillOps);

    MII = NextMII;
  }
}

llvm::Spiller* llvm::createSpiller() {
  switch (SpillerOpt) {
  default: assert(0 && "Unreachable!");
  case local:
    return new LocalSpiller();
  case simple:
    return new SimpleSpiller();
  }
}