Newer
Older
//===-------- InlineSpiller.cpp - Insert spills and restores inline -------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// The inline spiller modifies the machine function directly instead of
// inserting spills and restores in VirtRegMap.
//
//===----------------------------------------------------------------------===//
Jakob Stoklund Olesen
committed
#include "LiveRangeEdit.h"
#include "VirtRegMap.h"
#include "llvm/ADT/Statistic.h"
Jakob Stoklund Olesen
committed
#include "llvm/ADT/TinyPtrVector.h"
Jakob Stoklund Olesen
committed
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/CodeGen/LiveIntervalAnalysis.h"
#include "llvm/CodeGen/LiveStackAnalysis.h"
Jakob Stoklund Olesen
committed
#include "llvm/CodeGen/MachineDominators.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
Jakob Stoklund Olesen
committed
#include "llvm/CodeGen/MachineLoopInfo.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
using namespace llvm;
// Statistics gathered by the inline spiller; reported with -stats.
STATISTIC(NumSpilledRanges, "Number of spilled live ranges");
STATISTIC(NumSnippets, "Number of snippets included in spills");
STATISTIC(NumSpills, "Number of spills inserted");
STATISTIC(NumReloads, "Number of reloads inserted");
STATISTIC(NumFolded, "Number of folded stack accesses");
STATISTIC(NumFoldedLoads, "Number of folded loads");
STATISTIC(NumRemats, "Number of rematerialized defs for spilling");
STATISTIC(NumOmitReloadSpill, "Number of omitted spills after reloads");
STATISTIC(NumHoistLocal, "Number of locally hoisted spills");
STATISTIC(NumHoistGlobal, "Number of globally hoisted spills");
STATISTIC(NumRedundantSpills, "Number of redundant spills identified");
namespace {
class InlineSpiller : public Spiller {
MachineFunctionPass &Pass;
MachineFunction &MF;
LiveIntervals &LIS;
LiveStacks &LSS;
AliasAnalysis *AA;
Jakob Stoklund Olesen
committed
MachineDominatorTree &MDT;
MachineLoopInfo &Loops;
VirtRegMap &VRM;
MachineFrameInfo &MFI;
MachineRegisterInfo &MRI;
const TargetInstrInfo &TII;
const TargetRegisterInfo &TRI;
// Variables that are valid during spill(), but used by multiple methods.
LiveRangeEdit *Edit;
LiveInterval *StackInt;
int StackSlot;
Jakob Stoklund Olesen
committed
unsigned Original;
// All registers to spill to StackSlot, including the main register.
SmallVector<unsigned, 8> RegsToSpill;
// All COPY instructions to/from snippets.
// They are ignored since both operands refer to the same stack slot.
SmallPtrSet<MachineInstr*, 8> SnippetCopies;
Jakob Stoklund Olesen
committed
// Values that failed to remat at some point.
SmallPtrSet<VNInfo*, 8> UsedValues;
Jakob Stoklund Olesen
committed
Jakob Stoklund Olesen
committed
public:
Jakob Stoklund Olesen
committed
// Information about a value that was defined by a copy from a sibling
// register.
struct SibValueInfo {
// True when all reaching defs were reloads: No spill is necessary.
bool AllDefsAreReloads;
Jakob Stoklund Olesen
committed
// True when value is defined by an original PHI not from splitting.
bool DefByOrigPHI;
Jakob Stoklund Olesen
committed
// The preferred register to spill.
unsigned SpillReg;
// The value of SpillReg that should be spilled.
VNInfo *SpillVNI;
Jakob Stoklund Olesen
committed
// The block where SpillVNI should be spilled. Currently, this must be the
// block containing SpillVNI->def.
MachineBasicBlock *SpillMBB;
Jakob Stoklund Olesen
committed
// A defining instruction that is not a sibling copy or a reload, or NULL.
// This can be used as a template for rematerialization.
MachineInstr *DefMI;
Jakob Stoklund Olesen
committed
// List of values that depend on this one. These values are actually the
// same, but live range splitting has placed them in different registers,
// or SSA update needed to insert PHI-defs to preserve SSA form. This is
// copies of the current value and phi-kills. Usually only phi-kills cause
// more than one dependent value.
TinyPtrVector<VNInfo*> Deps;
Jakob Stoklund Olesen
committed
SibValueInfo(unsigned Reg, VNInfo *VNI)
Jakob Stoklund Olesen
committed
: AllDefsAreReloads(true), DefByOrigPHI(false),
SpillReg(Reg), SpillVNI(VNI), SpillMBB(0), DefMI(0) {}
// Returns true when a def has been found.
bool hasDef() const { return DefByOrigPHI || DefMI; }
Jakob Stoklund Olesen
committed
};
Jakob Stoklund Olesen
committed
private:
Jakob Stoklund Olesen
committed
// Values in RegsToSpill defined by sibling copies.
Jakob Stoklund Olesen
committed
typedef DenseMap<VNInfo*, SibValueInfo> SibValueMap;
SibValueMap SibValues;
Jakob Stoklund Olesen
committed
// Values live-out from basic blocks. This is the same as
// LI.getVNInfoAt(LIS.getMBBEndIdx(MBB).getPrevSlot())
typedef DenseMap<MachineBasicBlock*, VNInfo*> LiveOutMap;
LiveOutMap LiveOutValues;
Jakob Stoklund Olesen
committed
// Dead defs generated during spilling.
SmallVector<MachineInstr*, 8> DeadDefs;
Jakob Stoklund Olesen
committed
~InlineSpiller() {}
public:
Jakob Stoklund Olesen
committed
InlineSpiller(MachineFunctionPass &pass,
MachineFunction &mf,
VirtRegMap &vrm)
: Pass(pass),
MF(mf),
LIS(pass.getAnalysis<LiveIntervals>()),
LSS(pass.getAnalysis<LiveStacks>()),
AA(&pass.getAnalysis<AliasAnalysis>()),
Jakob Stoklund Olesen
committed
MDT(pass.getAnalysis<MachineDominatorTree>()),
Loops(pass.getAnalysis<MachineLoopInfo>()),
VRM(vrm),
MFI(*mf.getFrameInfo()),
MRI(mf.getRegInfo()),
TII(*mf.getTarget().getInstrInfo()),
TRI(*mf.getTarget().getRegisterInfo()) {}
Jakob Stoklund Olesen
committed
void spill(LiveRangeEdit &);
Jakob Stoklund Olesen
committed
private:
bool isSnippet(const LiveInterval &SnipLI);
void collectRegsToSpill();
Jakob Stoklund Olesen
committed
bool isRegToSpill(unsigned Reg) {
return std::find(RegsToSpill.begin(),
RegsToSpill.end(), Reg) != RegsToSpill.end();
}
bool isSibling(unsigned Reg);
Jakob Stoklund Olesen
committed
MachineInstr *traceSiblingValue(unsigned, VNInfo*, VNInfo*);
Jakob Stoklund Olesen
committed
void propagateSiblingValue(SibValueMap::iterator, VNInfo *VNI = 0);
Jakob Stoklund Olesen
committed
void analyzeSiblingValues();
Jakob Stoklund Olesen
committed
bool hoistSpill(LiveInterval &SpillLI, MachineInstr *CopyMI);
Jakob Stoklund Olesen
committed
void eliminateRedundantSpills(LiveInterval &LI, VNInfo *VNI);
Jakob Stoklund Olesen
committed
Jakob Stoklund Olesen
committed
void markValueUsed(LiveInterval*, VNInfo*);
bool reMaterializeFor(LiveInterval&, MachineBasicBlock::iterator MI);
Jakob Stoklund Olesen
committed
void reMaterializeAll();
bool coalesceStackAccess(MachineInstr *MI, unsigned Reg);
bool foldMemoryOperand(MachineBasicBlock::iterator MI,
Jakob Stoklund Olesen
committed
const SmallVectorImpl<unsigned> &Ops,
MachineInstr *LoadMI = 0);
Jakob Stoklund Olesen
committed
void insertReload(LiveInterval &NewLI, SlotIndex,
MachineBasicBlock::iterator MI);
void insertSpill(LiveInterval &NewLI, const LiveInterval &OldLI,
Jakob Stoklund Olesen
committed
SlotIndex, MachineBasicBlock::iterator MI);
void spillAroundUses(unsigned Reg);
Jakob Stoklund Olesen
committed
void spillAll();
};
}
namespace llvm {
Jakob Stoklund Olesen
committed
Spiller *createInlineSpiller(MachineFunctionPass &pass,
MachineFunction &mf,
VirtRegMap &vrm) {
return new InlineSpiller(pass, mf, vrm);
//===----------------------------------------------------------------------===//
// Snippets
//===----------------------------------------------------------------------===//
// When spilling a virtual register, we also spill any snippets it is connected
// to. The snippets are small live ranges that only have a single real use,
// leftovers from live range splitting. Spilling them enables memory operand
// folding or tightens the live range around the single use.
//
// This minimizes register pressure and maximizes the store-to-load distance for
// spill slots which can be important in tight loops.
/// isFullCopyOf - If MI is a COPY to or from Reg, return the other register,
/// otherwise return 0.
static unsigned isFullCopyOf(const MachineInstr *MI, unsigned Reg) {
return 0;
if (MI->getOperand(0).getReg() == Reg)
return MI->getOperand(1).getReg();
if (MI->getOperand(1).getReg() == Reg)
return MI->getOperand(0).getReg();
return 0;
}
/// isSnippet - Identify if a live interval is a snippet that should be spilled.
/// It is assumed that SnipLI is a virtual register with the same original as
/// Edit->getReg().
bool InlineSpiller::isSnippet(const LiveInterval &SnipLI) {
  unsigned Reg = Edit->getReg();

  // A snippet is a tiny live range with only a single instruction using it
  // besides copies to/from Reg or spills/fills. We accept:
  //
  // %snip = COPY %Reg / FILL fi#
  // %snip = USE %snip
  // %Reg = COPY %snip / SPILL %snip, fi#
  //
  // Reject at once if the range has too many values or spans several blocks.
  if (SnipLI.getNumValNums() > 2 || !LIS.intervalIsInOneMBB(SnipLI))
    return false;

  // The single non-copy, non-stack-access user seen so far (NULL until found).
  MachineInstr *UseMI = 0;

  // Check that all uses satisfy our criteria.
  for (MachineRegisterInfo::reg_nodbg_iterator
       RI = MRI.reg_nodbg_begin(SnipLI.reg);
       MachineInstr *MI = RI.skipInstruction();) {

    // Allow copies to/from Reg.
    if (isFullCopyOf(MI, Reg))
      continue;

    // Allow stack slot loads.
    int FI;
    if (SnipLI.reg == TII.isLoadFromStackSlot(MI, FI) && FI == StackSlot)
      continue;

    // Allow stack slot stores.
    if (SnipLI.reg == TII.isStoreToStackSlot(MI, FI) && FI == StackSlot)
      continue;

    // Allow a single additional instruction.
    if (UseMI && MI != UseMI)
      return false;
    UseMI = MI;
  }
  return true;
}
/// collectRegsToSpill - Collect live range snippets that only have a single
/// real use.
void InlineSpiller::collectRegsToSpill() {
unsigned Reg = Edit->getReg();
// Main register always spills.
RegsToSpill.assign(1, Reg);
SnippetCopies.clear();
// Snippets all have the same original, so there can't be any for an original
// register.
Jakob Stoklund Olesen
committed
if (Original == Reg)
for (MachineRegisterInfo::reg_iterator RI = MRI.reg_begin(Reg);
MachineInstr *MI = RI.skipInstruction();) {
unsigned SnipReg = isFullCopyOf(MI, Reg);
Jakob Stoklund Olesen
committed
if (!isSibling(SnipReg))
LiveInterval &SnipLI = LIS.getInterval(SnipReg);
if (!isSnippet(SnipLI))
continue;
SnippetCopies.insert(MI);
if (isRegToSpill(SnipReg))
continue;
RegsToSpill.push_back(SnipReg);
DEBUG(dbgs() << "\talso spill snippet " << SnipLI << '\n');
++NumSnippets;
Jakob Stoklund Olesen
committed
//===----------------------------------------------------------------------===//
// Sibling Values
//===----------------------------------------------------------------------===//
// After live range splitting, some values to be spilled may be defined by
// copies from sibling registers. We trace the sibling copies back to the
// original value if it still exists. We need it for rematerialization.
//
// Even when the value can't be rematerialized, we still want to determine if
// the value has already been spilled, or we may want to hoist the spill from a
// loop.
Jakob Stoklund Olesen
committed
/// isSibling - Check if Reg is a virtual register sharing this spill's
/// original register.
bool InlineSpiller::isSibling(unsigned Reg) {
  // Physical registers (and the 0 returned by isFullCopyOf on a miss) are
  // never siblings.
  if (!TargetRegisterInfo::isVirtualRegister(Reg))
    return false;
  return VRM.getOriginal(Reg) == Original;
}
Jakob Stoklund Olesen
committed
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
#ifndef NDEBUG
/// Debug printer for SibValueInfo; only compiled into asserts builds.
static raw_ostream &operator<<(raw_ostream &OS,
                               const InlineSpiller::SibValueInfo &SVI) {
  OS << "spill " << PrintReg(SVI.SpillReg) << ':'
     << SVI.SpillVNI->id << '@' << SVI.SpillVNI->def;
  if (SVI.SpillMBB)
    OS << " in BB#" << SVI.SpillMBB->getNumber();
  if (SVI.AllDefsAreReloads)
    OS << " all-reloads";
  if (SVI.DefByOrigPHI)
    OS << " orig-phi";
  OS << " deps[";
  for (unsigned DepIdx = 0, NumDeps = SVI.Deps.size();
       DepIdx != NumDeps; ++DepIdx) {
    const VNInfo *Dep = SVI.Deps[DepIdx];
    OS << ' ' << Dep->id << '@' << Dep->def;
  }
  OS << " ]";
  // Printing a MachineInstr already ends the line; otherwise finish it here.
  if (SVI.DefMI)
    OS << " def: " << *SVI.DefMI;
  else
    OS << '\n';
  return OS;
}
#endif
/// propagateSiblingValue - Propagate the value in SVI to dependents if it is
/// known. Otherwise remember the dependency for later.
///
/// @param SVI SibValues entry to propagate.
/// @param VNI Dependent value, or NULL to propagate to all saved dependents.
void InlineSpiller::propagateSiblingValue(SibValueMap::iterator SVI,
                                          VNInfo *VNI) {
  // When VNI is non-NULL, add it to SVI's deps, and only propagate to that.
  TinyPtrVector<VNInfo*> FirstDeps;
  if (VNI) {
    FirstDeps.push_back(VNI);
    SVI->second.Deps.push_back(VNI);
  }

  // Has the value been completely determined yet? If not, defer propagation.
  if (!SVI->second.hasDef())
    return;

  // Work list of values to propagate. It would be nice to use a SetVector
  // here, but then we would be forced to use a SmallSet.
  // WorkSet mirrors WorkList to avoid pushing the same entry twice.
  SmallVector<SibValueMap::iterator, 8> WorkList(1, SVI);
  SmallPtrSet<VNInfo*, 8> WorkSet;

  do {
    SVI = WorkList.pop_back_val();
    WorkSet.erase(SVI->first);
    // On the first iteration only, restrict propagation to the new dep in
    // FirstDeps; afterwards propagate to all recorded dependents.
    TinyPtrVector<VNInfo*> *Deps = VNI ? &FirstDeps : &SVI->second.Deps;
    VNI = 0;

    SibValueInfo &SV = SVI->second;
    // Lazily compute the defining block of the spill value.
    if (!SV.SpillMBB)
      SV.SpillMBB = LIS.getMBBFromIndex(SV.SpillVNI->def);

    DEBUG(dbgs() << " prop to " << Deps->size() << ": "
                 << SVI->first->id << '@' << SVI->first->def << ":\t" << SV);

    assert(SV.hasDef() && "Propagating undefined value");

    // Should this value be propagated as a preferred spill candidate? We don't
    // propagate values of registers that are about to spill.
    bool PropSpill = !isRegToSpill(SV.SpillReg);
    unsigned SpillDepth = ~0u;  // Loop depth of SV.SpillMBB, computed lazily.

    for (TinyPtrVector<VNInfo*>::iterator DepI = Deps->begin(),
         DepE = Deps->end(); DepI != DepE; ++DepI) {
      SibValueMap::iterator DepSVI = SibValues.find(*DepI);
      assert(DepSVI != SibValues.end() && "Dependent value not in SibValues");
      SibValueInfo &DepSV = DepSVI->second;
      if (!DepSV.SpillMBB)
        DepSV.SpillMBB = LIS.getMBBFromIndex(DepSV.SpillVNI->def);

      bool Changed = false;

      // Propagate defining instruction.
      if (!DepSV.hasDef()) {
        Changed = true;
        DepSV.DefMI = SV.DefMI;
        DepSV.DefByOrigPHI = SV.DefByOrigPHI;
      }

      // Propagate AllDefsAreReloads. For PHI values, this computes an AND of
      // all predecessors.
      if (!SV.AllDefsAreReloads && DepSV.AllDefsAreReloads) {
        Changed = true;
        DepSV.AllDefsAreReloads = false;
      }

      // Propagate best spill value.
      if (PropSpill && SV.SpillVNI != DepSV.SpillVNI) {
        if (SV.SpillMBB == DepSV.SpillMBB) {
          // DepSV is in the same block. Hoist when dominated.
          if (SV.SpillVNI->def < DepSV.SpillVNI->def) {
            // This is an alternative def earlier in the same MBB.
            // Hoist the spill as far as possible in SpillMBB. This can ease
            // register pressure:
            //
            // x = def
            // y = use x
            // s = copy x
            //
            // Hoisting the spill of s to immediately after the def removes the
            // interference between x and y:
            //
            // x = def
            // spill x
            // y = use x<kill>
            //
            Changed = true;
            DepSV.SpillReg = SV.SpillReg;
            DepSV.SpillVNI = SV.SpillVNI;
            DepSV.SpillMBB = SV.SpillMBB;
          }
        } else {
          // DepSV is in a different block.
          if (SpillDepth == ~0u)
            SpillDepth = Loops.getLoopDepth(SV.SpillMBB);

          // Also hoist spills to blocks with smaller loop depth, but make sure
          // that the new value dominates. Non-phi dependents are always
          // dominated, phis need checking.
          if ((Loops.getLoopDepth(DepSV.SpillMBB) > SpillDepth) &&
              (!DepSVI->first->isPHIDef() ||
               MDT.dominates(SV.SpillMBB, DepSV.SpillMBB))) {
            Changed = true;
            DepSV.SpillReg = SV.SpillReg;
            DepSV.SpillVNI = SV.SpillVNI;
            DepSV.SpillMBB = SV.SpillMBB;
          }
        }
      }

      if (!Changed)
        continue;

      // Something changed in DepSVI. Propagate to dependents.
      if (WorkSet.insert(DepSVI->first))
        WorkList.push_back(DepSVI);

      DEBUG(dbgs() << " update " << DepSVI->first->id << '@'
                   << DepSVI->first->def << " to:\t" << DepSV);
    }
  } while (!WorkList.empty());
}
Jakob Stoklund Olesen
committed
/// traceSiblingValue - Trace a value that is about to be spilled back to the
/// real defining instructions by looking through sibling copies. Always stay
/// within the range of OrigVNI so the registers are known to carry the same
/// value.
///
/// Determine if the value is defined by all reloads, so spilling isn't
/// necessary - the value is already in the stack slot.
///
Jakob Stoklund Olesen
committed
/// Return a defining instruction that may be a candidate for rematerialization.
Jakob Stoklund Olesen
committed
///
Jakob Stoklund Olesen
committed
MachineInstr *InlineSpiller::traceSiblingValue(unsigned UseReg, VNInfo *UseVNI,
VNInfo *OrigVNI) {
Jakob Stoklund Olesen
committed
// Check if a cached value already exists.
SibValueMap::iterator SVI;
bool Inserted;
tie(SVI, Inserted) =
SibValues.insert(std::make_pair(UseVNI, SibValueInfo(UseReg, UseVNI)));
if (!Inserted) {
DEBUG(dbgs() << "Cached value " << PrintReg(UseReg) << ':'
<< UseVNI->id << '@' << UseVNI->def << ' ' << SVI->second);
return SVI->second.DefMI;
}
Jakob Stoklund Olesen
committed
DEBUG(dbgs() << "Tracing value " << PrintReg(UseReg) << ':'
<< UseVNI->id << '@' << UseVNI->def << '\n');
Jakob Stoklund Olesen
committed
// List of (Reg, VNI) that have been inserted into SibValues, but need to be
// processed.
Jakob Stoklund Olesen
committed
SmallVector<std::pair<unsigned, VNInfo*>, 8> WorkList;
Jakob Stoklund Olesen
committed
WorkList.push_back(std::make_pair(UseReg, UseVNI));
do {
unsigned Reg;
VNInfo *VNI;
tie(Reg, VNI) = WorkList.pop_back_val();
Jakob Stoklund Olesen
committed
DEBUG(dbgs() << " " << PrintReg(Reg) << ':' << VNI->id << '@' << VNI->def
<< ":\t");
Jakob Stoklund Olesen
committed
Jakob Stoklund Olesen
committed
// First check if this value has already been computed.
SVI = SibValues.find(VNI);
assert(SVI != SibValues.end() && "Missing SibValues entry");
Jakob Stoklund Olesen
committed
// Trace through PHI-defs created by live range splitting.
if (VNI->isPHIDef()) {
if (VNI->def == OrigVNI->def) {
Jakob Stoklund Olesen
committed
DEBUG(dbgs() << "orig phi value\n");
SVI->second.DefByOrigPHI = true;
SVI->second.AllDefsAreReloads = false;
propagateSiblingValue(SVI);
Jakob Stoklund Olesen
committed
continue;
}
// Get values live-out of predecessors.
LiveInterval &LI = LIS.getInterval(Reg);
MachineBasicBlock *MBB = LIS.getMBBFromIndex(VNI->def);
Jakob Stoklund Olesen
committed
DEBUG(dbgs() << "split phi value, check " << MBB->pred_size()
<< " preds\n");
Jakob Stoklund Olesen
committed
for (MachineBasicBlock::pred_iterator PI = MBB->pred_begin(),
PE = MBB->pred_end(); PI != PE; ++PI) {
Jakob Stoklund Olesen
committed
// Use a cache of block live-out values. This is faster than using
// getVNInfoAt on complex intervals.
VNInfo *&PVNI = LiveOutValues[*PI];
if (!PVNI)
PVNI = LI.getVNInfoAt(LIS.getMBBEndIdx(*PI).getPrevSlot());
if (!PVNI)
continue;
// Known predecessor value? Try an insertion.
tie(SVI, Inserted) =
SibValues.insert(std::make_pair(PVNI, SibValueInfo(Reg, PVNI)));
// This is the first time we see PVNI, add it to the worklist.
if (Inserted)
Jakob Stoklund Olesen
committed
WorkList.push_back(std::make_pair(Reg, PVNI));
Jakob Stoklund Olesen
committed
propagateSiblingValue(SVI, VNI);
Jakob Stoklund Olesen
committed
}
Jakob Stoklund Olesen
committed
// Next work list item.
Jakob Stoklund Olesen
committed
continue;
}
MachineInstr *MI = LIS.getInstructionFromIndex(VNI->def);
assert(MI && "Missing def");
// Trace through sibling copies.
if (unsigned SrcReg = isFullCopyOf(MI, Reg)) {
Jakob Stoklund Olesen
committed
if (isSibling(SrcReg)) {
Jakob Stoklund Olesen
committed
LiveInterval &SrcLI = LIS.getInterval(SrcReg);
VNInfo *SrcVNI = SrcLI.getVNInfoAt(VNI->def.getUseIndex());
assert(SrcVNI && "Copy from non-existing value");
Jakob Stoklund Olesen
committed
DEBUG(dbgs() << "copy of " << PrintReg(SrcReg) << ':'
Jakob Stoklund Olesen
committed
<< SrcVNI->id << '@' << SrcVNI->def << '\n');
Jakob Stoklund Olesen
committed
// Known sibling source value? Try an insertion.
tie(SVI, Inserted) = SibValues.insert(std::make_pair(SrcVNI,
SibValueInfo(SrcReg, SrcVNI)));
// This is the first time we see Src, add it to the worklist.
if (Inserted)
WorkList.push_back(std::make_pair(SrcReg, SrcVNI));
propagateSiblingValue(SVI, VNI);
// Next work list item.
Jakob Stoklund Olesen
committed
continue;
}
}
// Track reachable reloads.
Jakob Stoklund Olesen
committed
SVI->second.DefMI = MI;
SVI->second.SpillMBB = MI->getParent();
Jakob Stoklund Olesen
committed
int FI;
if (Reg == TII.isLoadFromStackSlot(MI, FI) && FI == StackSlot) {
Jakob Stoklund Olesen
committed
DEBUG(dbgs() << "reload\n");
propagateSiblingValue(SVI);
// Next work list item.
Jakob Stoklund Olesen
committed
continue;
}
// Potential remat candidate.
Jakob Stoklund Olesen
committed
DEBUG(dbgs() << "def " << *MI);
SVI->second.AllDefsAreReloads = false;
propagateSiblingValue(SVI);
Jakob Stoklund Olesen
committed
} while (!WorkList.empty());
Jakob Stoklund Olesen
committed
// Look up the value we were looking for. We already did this lokup at the
// top of the function, but SibValues may have been invalidated.
SVI = SibValues.find(UseVNI);
assert(SVI != SibValues.end() && "Didn't compute requested info");
DEBUG(dbgs() << " traced to:\t" << SVI->second);
return SVI->second.DefMI;
Jakob Stoklund Olesen
committed
}
/// analyzeSiblingValues - Trace values defined by sibling copies back to
/// something that isn't a sibling copy.
Jakob Stoklund Olesen
committed
///
/// Keep track of values that may be rematerializable.
Jakob Stoklund Olesen
committed
void InlineSpiller::analyzeSiblingValues() {
SibValues.clear();
Jakob Stoklund Olesen
committed
LiveOutValues.clear();
Jakob Stoklund Olesen
committed
// No siblings at all?
if (Edit->getReg() == Original)
return;
LiveInterval &OrigLI = LIS.getInterval(Original);
for (unsigned i = 0, e = RegsToSpill.size(); i != e; ++i) {
unsigned Reg = RegsToSpill[i];
LiveInterval &LI = LIS.getInterval(Reg);
for (LiveInterval::const_vni_iterator VI = LI.vni_begin(),
VE = LI.vni_end(); VI != VE; ++VI) {
VNInfo *VNI = *VI;
Jakob Stoklund Olesen
committed
if (VNI->isUnused())
Jakob Stoklund Olesen
committed
continue;
Jakob Stoklund Olesen
committed
MachineInstr *DefMI = 0;
// Check possible sibling copies.
if (VNI->isPHIDef() || VNI->getCopy()) {
VNInfo *OrigVNI = OrigLI.getVNInfoAt(VNI->def);
assert(OrigVNI && "Def outside original live range");
Jakob Stoklund Olesen
committed
if (OrigVNI->def != VNI->def)
DefMI = traceSiblingValue(Reg, VNI, OrigVNI);
}
if (!DefMI && !VNI->isPHIDef())
DefMI = LIS.getInstructionFromIndex(VNI->def);
if (DefMI && Edit->checkRematerializable(VNI, DefMI, TII, AA)) {
DEBUG(dbgs() << "Value " << PrintReg(Reg) << ':' << VNI->id << '@'
<< VNI->def << " may remat from " << *DefMI);
}
Jakob Stoklund Olesen
committed
}
}
}
Jakob Stoklund Olesen
committed
/// hoistSpill - Given a sibling copy that defines a value to be spilled, insert
/// a spill at a better location.
bool InlineSpiller::hoistSpill(LiveInterval &SpillLI, MachineInstr *CopyMI) {
  SlotIndex Idx = LIS.getInstructionIndex(CopyMI);
  VNInfo *VNI = SpillLI.getVNInfoAt(Idx.getDefIndex());
  assert(VNI && VNI->def == Idx.getDefIndex() && "Not defined by copy");
  SibValueMap::iterator I = SibValues.find(VNI);
  if (I == SibValues.end())
    return false;

  const SibValueInfo &SVI = I->second;

  // Let the normal folding code deal with the boring case.
  if (!SVI.AllDefsAreReloads && SVI.SpillVNI == VNI)
    return false;

  // SpillReg may have been deleted by remat and DCE.
  if (!LIS.hasInterval(SVI.SpillReg)) {
    DEBUG(dbgs() << "Stale interval: " << PrintReg(SVI.SpillReg) << '\n');
    SibValues.erase(I);
    return false;
  }

  LiveInterval &SibLI = LIS.getInterval(SVI.SpillReg);
  if (!SibLI.containsValue(SVI.SpillVNI)) {
    DEBUG(dbgs() << "Stale value: " << PrintReg(SVI.SpillReg) << '\n');
    SibValues.erase(I);
    return false;
  }

  // Conservatively extend the stack slot range to the range of the original
  // value. We may be able to do better with stack slot coloring by being more
  // careful here.
  assert(StackInt && "No stack slot assigned yet.");
  LiveInterval &OrigLI = LIS.getInterval(Original);
  VNInfo *OrigVNI = OrigLI.getVNInfoAt(Idx);
  StackInt->MergeValueInAsValue(OrigLI, OrigVNI, StackInt->getValNumInfo(0));
  DEBUG(dbgs() << "\tmerged orig valno " << OrigVNI->id << ": "
               << *StackInt << '\n');

  // Already spilled everywhere.
  if (SVI.AllDefsAreReloads) {
    DEBUG(dbgs() << "\tno spill needed: " << SVI);
    ++NumOmitReloadSpill;
    return true;
  }

  // We are going to spill SVI.SpillVNI immediately after its def, so clear out
  // any later spills of the same value.
  eliminateRedundantSpills(SibLI, SVI.SpillVNI);

  MachineBasicBlock *MBB = LIS.getMBBFromIndex(SVI.SpillVNI->def);
  MachineBasicBlock::iterator MII;
  if (SVI.SpillVNI->isPHIDef())
    MII = MBB->SkipPHIsAndLabels(MBB->begin());
  else {
    MachineInstr *DefMI = LIS.getInstructionFromIndex(SVI.SpillVNI->def);
    assert(DefMI && "Defining instruction disappeared");
    MII = DefMI;
    ++MII;
  }
  // Insert spill without kill flag immediately after def.
  TII.storeRegToStackSlot(*MBB, MII, SVI.SpillReg, false, StackSlot,
                          MRI.getRegClass(SVI.SpillReg), &TRI);
  --MII; // Point to store instruction.
  LIS.InsertMachineInstrInMaps(MII);
  VRM.addSpillSlotUse(StackSlot, MII);
  DEBUG(dbgs() << "\thoisted: " << SVI.SpillVNI->def << '\t' << *MII);

  if (MBB == CopyMI->getParent())
    ++NumHoistLocal;
  else
    ++NumHoistGlobal;
  return true;
}
/// eliminateRedundantSpills - SLI:VNI is known to be on the stack. Remove any
/// redundant spills of this value in SLI.reg and sibling copies.
void InlineSpiller::eliminateRedundantSpills(LiveInterval &SLI, VNInfo *VNI) {
  assert(VNI && "Missing value");
  SmallVector<std::pair<LiveInterval*, VNInfo*>, 8> WorkList;
  WorkList.push_back(std::make_pair(&SLI, VNI));
  assert(StackInt && "No stack slot assigned yet.");

  do {
    LiveInterval *LI;
    tie(LI, VNI) = WorkList.pop_back_val();
    unsigned Reg = LI->reg;
    DEBUG(dbgs() << "Checking redundant spills for "
                 << VNI->id << '@' << VNI->def << " in " << *LI << '\n');

    // Regs to spill are taken care of.
    if (isRegToSpill(Reg))
      continue;

    // Add all of VNI's live range to StackInt.
    StackInt->MergeValueInAsValue(*LI, VNI, StackInt->getValNumInfo(0));
    DEBUG(dbgs() << "Merged to stack int: " << *StackInt << '\n');

    // Find all spills and copies of VNI.
    for (MachineRegisterInfo::use_nodbg_iterator UI = MRI.use_nodbg_begin(Reg);
         MachineInstr *MI = UI.skipInstruction();) {
      // Only copies and stores can carry the value to a stack slot.
      if (!MI->isCopy() && !MI->getDesc().mayStore())
        continue;
      SlotIndex Idx = LIS.getInstructionIndex(MI);
      if (LI->getVNInfoAt(Idx) != VNI)
        continue;

      // Follow sibling copies down the dominator tree.
      if (unsigned DstReg = isFullCopyOf(MI, Reg)) {
        if (isSibling(DstReg)) {
          LiveInterval &DstLI = LIS.getInterval(DstReg);
          VNInfo *DstVNI = DstLI.getVNInfoAt(Idx.getDefIndex());
          assert(DstVNI && "Missing defined value");
          assert(DstVNI->def == Idx.getDefIndex() && "Wrong copy def slot");
          WorkList.push_back(std::make_pair(&DstLI, DstVNI));
        }
        continue;
      }

      // Erase spills.
      int FI;
      if (Reg == TII.isStoreToStackSlot(MI, FI) && FI == StackSlot) {
        DEBUG(dbgs() << "Redundant spill " << Idx << '\t' << *MI);
        // eliminateDeadDefs won't normally remove stores, so switch opcode.
        MI->setDesc(TII.get(TargetOpcode::KILL));
        DeadDefs.push_back(MI);
        ++NumRedundantSpills;
      }
    }
  } while (!WorkList.empty());
}
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
//===----------------------------------------------------------------------===//
// Rematerialization
//===----------------------------------------------------------------------===//
/// markValueUsed - Remember that VNI failed to rematerialize, so its defining
/// instruction cannot be eliminated. See through snippet copies.
void InlineSpiller::markValueUsed(LiveInterval *LI, VNInfo *VNI) {
  // Worklist of (interval, value) pairs that still need the used-mark.
  SmallVector<std::pair<LiveInterval*, VNInfo*>, 8> WorkList;
  WorkList.push_back(std::make_pair(LI, VNI));
  do {
    tie(LI, VNI) = WorkList.pop_back_val();
    // insert() returns false when VNI was already marked; avoids re-visiting.
    if (!UsedValues.insert(VNI))
      continue;

    if (VNI->isPHIDef()) {
      // A used PHI-def keeps all of its live-in predecessor values alive too.
      MachineBasicBlock *MBB = LIS.getMBBFromIndex(VNI->def);
      for (MachineBasicBlock::pred_iterator PI = MBB->pred_begin(),
           PE = MBB->pred_end(); PI != PE; ++PI) {
        VNInfo *PVNI = LI->getVNInfoAt(LIS.getMBBEndIdx(*PI).getPrevSlot());
        if (PVNI)
          WorkList.push_back(std::make_pair(LI, PVNI));
      }
      continue;
    }

    // Follow snippet copies.
    MachineInstr *MI = LIS.getInstructionFromIndex(VNI->def);
    if (!SnippetCopies.count(MI))
      continue;
    // The copy source (operand 1) is a snippet register; its value at the
    // copy must be kept as well.
    LiveInterval &SnipLI = LIS.getInterval(MI->getOperand(1).getReg());
    assert(isRegToSpill(SnipLI.reg) && "Unexpected register in copy");
    VNInfo *SnipVNI = SnipLI.getVNInfoAt(VNI->def.getUseIndex());
    assert(SnipVNI && "Snippet undefined before copy");
    WorkList.push_back(std::make_pair(&SnipLI, SnipVNI));
  } while (!WorkList.empty());
}
/// reMaterializeFor - Attempt to rematerialize before MI instead of reloading.
Jakob Stoklund Olesen
committed
bool InlineSpiller::reMaterializeFor(LiveInterval &VirtReg,
MachineBasicBlock::iterator MI) {
SlotIndex UseIdx = LIS.getInstructionIndex(MI).getUseIndex();
VNInfo *ParentVNI = VirtReg.getVNInfoAt(UseIdx.getBaseIndex());
Jakob Stoklund Olesen
committed
Jakob Stoklund Olesen
committed
if (!ParentVNI) {
Jakob Stoklund Olesen
committed
DEBUG(dbgs() << "\tadding <undef> flags: ");
for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
MachineOperand &MO = MI->getOperand(i);
Jakob Stoklund Olesen
committed
if (MO.isReg() && MO.isUse() && MO.getReg() == VirtReg.reg)
MO.setIsUndef();
}
Jakob Stoklund Olesen
committed
DEBUG(dbgs() << UseIdx << '\t' << *MI);
return true;
}
Jakob Stoklund Olesen
committed
Jakob Stoklund Olesen
committed
if (SnippetCopies.count(MI))
Jakob Stoklund Olesen
committed
// Use an OrigVNI from traceSiblingValue when ParentVNI is a sibling copy.
LiveRangeEdit::Remat RM(ParentVNI);
SibValueMap::const_iterator SibI = SibValues.find(ParentVNI);
if (SibI != SibValues.end())
RM.OrigMI = SibI->second.DefMI;
if (!Edit->canRematerializeAt(RM, UseIdx, false, LIS)) {
Jakob Stoklund Olesen
committed
markValueUsed(&VirtReg, ParentVNI);
Jakob Stoklund Olesen
committed
DEBUG(dbgs() << "\tcannot remat for " << UseIdx << '\t' << *MI);
return false;
Jakob Stoklund Olesen
committed
}
Jakob Stoklund Olesen
committed
// If the instruction also writes VirtReg.reg, it had better not require the
// same register for uses and defs.
Jakob Stoklund Olesen
committed
bool Reads, Writes;
SmallVector<unsigned, 8> Ops;
Jakob Stoklund Olesen
committed
tie(Reads, Writes) = MI->readsWritesVirtualRegister(VirtReg.reg, &Ops);
Jakob Stoklund Olesen
committed
if (Writes) {
for (unsigned i = 0, e = Ops.size(); i != e; ++i) {
MachineOperand &MO = MI->getOperand(Ops[i]);
if (MO.isUse() ? MI->isRegTiedToDefOperand(Ops[i]) : MO.getSubReg()) {
Jakob Stoklund Olesen
committed
markValueUsed(&VirtReg, ParentVNI);
Jakob Stoklund Olesen
committed
DEBUG(dbgs() << "\tcannot remat tied reg: " << UseIdx << '\t' << *MI);
return false;
Jakob Stoklund Olesen
committed
}
Jakob Stoklund Olesen
committed
// Before rematerializing into a register for a single instruction, try to
// fold a load into the instruction. That avoids allocating a new register.
if (RM.OrigMI->getDesc().canFoldAsLoad() &&
foldMemoryOperand(MI, Ops, RM.OrigMI)) {
Edit->markRematerialized(RM.ParentVNI);
++NumFoldedLoads;
Jakob Stoklund Olesen
committed
return true;
}
Jakob Stoklund Olesen
committed
// Alocate a new register for the remat.
Jakob Stoklund Olesen
committed
LiveInterval &NewLI = Edit->createFrom(Original, LIS, VRM);
Jakob Stoklund Olesen
committed
NewLI.markNotSpillable();
// Finally we can rematerialize OrigMI before MI.
SlotIndex DefIdx = Edit->rematerializeAt(*MI->getParent(), MI, NewLI.reg, RM,
LIS, TII, TRI);
Jakob Stoklund Olesen
committed
DEBUG(dbgs() << "\tremat: " << DefIdx << '\t'
<< *LIS.getInstructionFromIndex(DefIdx));
Jakob Stoklund Olesen
committed
// Replace operands
for (unsigned i = 0, e = Ops.size(); i != e; ++i) {
MachineOperand &MO = MI->getOperand(Ops[i]);
Jakob Stoklund Olesen
committed
if (MO.isReg() && MO.isUse() && MO.getReg() == VirtReg.reg) {
Jakob Stoklund Olesen
committed
MO.setReg(NewLI.reg);
Jakob Stoklund Olesen
committed
MO.setIsKill();
}
}
DEBUG(dbgs() << "\t " << UseIdx << '\t' << *MI);
VNInfo *DefVNI = NewLI.getNextValue(DefIdx, 0, LIS.getVNInfoAllocator());
NewLI.addRange(LiveRange(DefIdx, UseIdx.getDefIndex(), DefVNI));
Jakob Stoklund Olesen
committed
DEBUG(dbgs() << "\tinterval: " << NewLI << '\n');
++NumRemats;
return true;
}
Jakob Stoklund Olesen
committed
/// reMaterializeAll - Try to rematerialize as many uses as possible,
Jakob Stoklund Olesen
committed
/// and trim the live ranges after.
void InlineSpiller::reMaterializeAll() {
Jakob Stoklund Olesen
committed
// analyzeSiblingValues has already tested all relevant defining instructions.
if (!Edit->anyRematerializable(LIS, TII, AA))
Jakob Stoklund Olesen
committed
return;
UsedValues.clear();
Jakob Stoklund Olesen
committed
Jakob Stoklund Olesen
committed
// Try to remat before all uses of snippets.
Jakob Stoklund Olesen
committed
bool anyRemat = false;
Jakob Stoklund Olesen
committed
for (unsigned i = 0, e = RegsToSpill.size(); i != e; ++i) {
unsigned Reg = RegsToSpill[i];
LiveInterval &LI = LIS.getInterval(Reg);
for (MachineRegisterInfo::use_nodbg_iterator
RI = MRI.use_nodbg_begin(Reg);
MachineInstr *MI = RI.skipInstruction();)
anyRemat |= reMaterializeFor(LI, MI);
}
Jakob Stoklund Olesen
committed
if (!anyRemat)
return;
// Remove any values that were completely rematted.
Jakob Stoklund Olesen
committed
for (unsigned i = 0, e = RegsToSpill.size(); i != e; ++i) {
unsigned Reg = RegsToSpill[i];
LiveInterval &LI = LIS.getInterval(Reg);
for (LiveInterval::vni_iterator I = LI.vni_begin(), E = LI.vni_end();
I != E; ++I) {
VNInfo *VNI = *I;
if (VNI->isUnused() || VNI->isPHIDef() || UsedValues.count(VNI))
Jakob Stoklund Olesen
committed
continue;
MachineInstr *MI = LIS.getInstructionFromIndex(VNI->def);
MI->addRegisterDead(Reg, &TRI);
if (!MI->allDefsAreDead())
Jakob Stoklund Olesen
committed
DEBUG(dbgs() << "All defs dead: " << *MI);
DeadDefs.push_back(MI);
Jakob Stoklund Olesen
committed
}
// Eliminate dead code after remat. Note that some snippet copies may be
// deleted here.
if (DeadDefs.empty())
return;
DEBUG(dbgs() << "Remat created " << DeadDefs.size() << " dead defs.\n");
Edit->eliminateDeadDefs(DeadDefs, LIS, VRM, TII);
// Get rid of deleted and empty intervals.
for (unsigned i = RegsToSpill.size(); i != 0; --i) {
unsigned Reg = RegsToSpill[i-1];
if (!LIS.hasInterval(Reg)) {
RegsToSpill.erase(RegsToSpill.begin() + (i - 1));
continue;
}
LiveInterval &LI = LIS.getInterval(Reg);
if (!LI.empty())
continue;
Edit->eraseVirtReg(Reg, LIS);
RegsToSpill.erase(RegsToSpill.begin() + (i - 1));
}
DEBUG(dbgs() << RegsToSpill.size() << " registers to spill after remat.\n");
Jakob Stoklund Olesen
committed
}
Jakob Stoklund Olesen
committed
//===----------------------------------------------------------------------===//
// Spilling
//===----------------------------------------------------------------------===//
/// If MI is a load or store of StackSlot, it can be removed.
bool InlineSpiller::coalesceStackAccess(MachineInstr *MI, unsigned Reg) {
Jakob Stoklund Olesen
committed
int FI = 0;
if (!(InstrReg = TII.isLoadFromStackSlot(MI, FI)) &&
!(InstrReg = TII.isStoreToStackSlot(MI, FI)))
Jakob Stoklund Olesen
committed
return false;
// We have a stack access. Is it the right register and slot?
if (InstrReg != Reg || FI != StackSlot)
Jakob Stoklund Olesen
committed
return false;
DEBUG(dbgs() << "Coalescing stack access: " << *MI);
LIS.RemoveMachineInstrFromMaps(MI);
Jakob Stoklund Olesen
committed
MI->eraseFromParent();
return true;
}
/// foldMemoryOperand - Try folding stack slot references in Ops into MI.
/// @param MI     Instruction using or defining the current register.
/// @param Ops    Operand indices from readsWritesVirtualRegister().
/// @param LoadMI Load instruction to use instead of stack slot when non-null.
/// @return True on success, and MI will be erased.
bool InlineSpiller::foldMemoryOperand(MachineBasicBlock::iterator MI,
Jakob Stoklund Olesen
committed
const SmallVectorImpl<unsigned> &Ops,
MachineInstr *LoadMI) {
// TargetInstrInfo::foldMemoryOperand only expects explicit, non-tied
// operands.
SmallVector<unsigned, 8> FoldOps;
for (unsigned i = 0, e = Ops.size(); i != e; ++i) {
unsigned Idx = Ops[i];
MachineOperand &MO = MI->getOperand(Idx);
if (MO.isImplicit())
continue;
// FIXME: Teach targets to deal with subregs.
if (MO.getSubReg())
return false;
Jakob Stoklund Olesen
committed
// We cannot fold a load instruction into a def.
if (LoadMI && MO.isDef())
return false;
// Tied use operands should not be passed to foldMemoryOperand.
if (!MI->isRegTiedToDefOperand(Idx))
FoldOps.push_back(Idx);
}
Jakob Stoklund Olesen
committed
MachineInstr *FoldMI =
LoadMI ? TII.foldMemoryOperand(MI, FoldOps, LoadMI)
: TII.foldMemoryOperand(MI, FoldOps, StackSlot);
if (!FoldMI)
return false;
LIS.ReplaceMachineInstrInMaps(MI, FoldMI);
Jakob Stoklund Olesen
committed
if (!LoadMI)
VRM.addSpillSlotUse(StackSlot, FoldMI);
Jakob Stoklund Olesen
committed
MI->eraseFromParent();
DEBUG(dbgs() << "\tfolded: " << *FoldMI);
++NumFolded;
return true;