//===-- PreAllocSplitting.cpp - Pre-allocation Interval Splitting Pass. ---===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the machine instruction level pre-register allocation
// live interval splitting pass. It finds live interval barriers, i.e.
// instructions which will kill all physical registers in certain register
// classes, and splits all live intervals which cross the barrier.
//
//===----------------------------------------------------------------------===//
#define DEBUG_TYPE "pre-alloc-split"
#include "llvm/CodeGen/LiveIntervalAnalysis.h"
#include "llvm/CodeGen/LiveStackAnalysis.h"
#include "llvm/CodeGen/MachineDominators.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineLoopInfo.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/RegisterCoalescer.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetOptions.h"
#include "llvm/Target/TargetRegisterInfo.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DepthFirstIterator.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/Statistic.h"
using namespace llvm;
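// Hidden debugging option: caps how many splits the pass performs; the -1
// default is assumed to mean "no limit" (the code consuming it is elsewhere).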
static cl::opt<int> PreSplitLimit("pre-split-limit", cl::init(-1), cl::Hidden);
STATISTIC(NumSplits, "Number of intervals split");
STATISTIC(NumRemats, "Number of intervals split by rematerialization");
STATISTIC(NumFolds, "Number of intervals split with spill folding");
STATISTIC(NumRenumbers, "Number of intervals renumbered into new registers");
namespace {
class VISIBILITY_HIDDEN PreAllocSplitting : public MachineFunctionPass {
MachineFunction *CurrMF;
const TargetMachine *TM;
const TargetInstrInfo *TII;
MachineFrameInfo *MFI;
MachineRegisterInfo *MRI;
LiveIntervals *LIs;
LiveStacks *LSs;
// Barrier - Current barrier being processed.
MachineInstr *Barrier;
// BarrierMBB - Basic block where the barrier resides.
MachineBasicBlock *BarrierMBB;
// BarrierIdx - Current barrier index.
unsigned BarrierIdx;
// CurrLI - Current live interval being split.
LiveInterval *CurrLI;
// CurrSLI - Current stack slot live interval.
LiveInterval *CurrSLI;
// CurrSValNo - Current val# for the stack slot live interval.
VNInfo *CurrSValNo;
// IntervalSSMap - A map from live interval to spill slots.
DenseMap<unsigned, int> IntervalSSMap;
// Def2SpillMap - A map from a def instruction index to spill index.
DenseMap<unsigned, unsigned> Def2SpillMap;
public:
static char ID;
PreAllocSplitting() : MachineFunctionPass(&ID) {}
virtual bool runOnMachineFunction(MachineFunction &MF);
virtual void getAnalysisUsage(AnalysisUsage &AU) const {
AU.addRequired<LiveIntervals>();
AU.addPreserved<LiveIntervals>();
AU.addRequired<LiveStacks>();
AU.addPreserved<LiveStacks>();
AU.addPreserved<RegisterCoalescer>();
if (StrongPHIElim)
AU.addPreservedID(StrongPHIEliminationID);
else
AU.addPreservedID(PHIEliminationID);
AU.addRequired<MachineDominatorTree>();
AU.addRequired<MachineLoopInfo>();
AU.addPreserved<MachineDominatorTree>();
AU.addPreserved<MachineLoopInfo>();
MachineFunctionPass::getAnalysisUsage(AU);
}
virtual void releaseMemory() {
IntervalSSMap.clear();
Def2SpillMap.clear();
}
virtual const char *getPassName() const {
return "Pre-Register Allocaton Live Interval Splitting";
}
/// print - Implement the dump method.
virtual void print(std::ostream &O, const Module* M = 0) const {
LIs->print(O, M);
}
void print(std::ostream *O, const Module* M = 0) const {
if (O) print(*O, M);
}
private:
MachineBasicBlock::iterator
findNextEmptySlot(MachineBasicBlock*, MachineInstr*,
unsigned&);
MachineBasicBlock::iterator
findSpillPoint(MachineBasicBlock*, MachineInstr*, MachineInstr*,
SmallPtrSet<MachineInstr*, 4>&, unsigned&);
MachineBasicBlock::iterator
findRestorePoint(MachineBasicBlock*, MachineInstr*, unsigned,
SmallPtrSet<MachineInstr*, 4>&, unsigned&);
int CreateSpillStackSlot(unsigned, const TargetRegisterClass *);
bool IsAvailableInStack(MachineBasicBlock*, unsigned, unsigned, unsigned,
unsigned&, int&) const;
void UpdateSpillSlotInterval(VNInfo*, unsigned, unsigned);
VNInfo* UpdateRegisterInterval(VNInfo*, unsigned, unsigned);
bool ShrinkWrapToLastUse(MachineBasicBlock*, VNInfo*,
SmallVector<MachineOperand*, 4>&,
SmallPtrSet<MachineInstr*, 4>&);
void ShrinkWrapLiveInterval(VNInfo*, MachineBasicBlock*, MachineBasicBlock*,
MachineBasicBlock*, SmallPtrSet<MachineBasicBlock*, 8>&,
DenseMap<MachineBasicBlock*, SmallVector<MachineOperand*, 4> >&,
DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 4> >&,
SmallVector<MachineBasicBlock*, 4>&);
bool SplitRegLiveInterval(LiveInterval*);
bool SplitRegLiveIntervals(const TargetRegisterClass **);
void RepairLiveInterval(LiveInterval* CurrLI, VNInfo* ValNo,
MachineInstr* DefMI, unsigned RestoreIdx);
bool createsNewJoin(LiveRange* LR, MachineBasicBlock* DefMBB,
MachineBasicBlock* BarrierMBB);
bool Rematerialize(unsigned vreg, VNInfo* ValNo,
MachineInstr* DefMI,
MachineBasicBlock::iterator RestorePt,
unsigned RestoreIdx,
SmallPtrSet<MachineInstr*, 4>& RefsInMBB);
MachineInstr* FoldSpill(unsigned vreg, const TargetRegisterClass* RC,
MachineInstr* DefMI,
MachineInstr* Barrier,
MachineBasicBlock* MBB,
int& SS,
SmallPtrSet<MachineInstr*, 4>& RefsInMBB);
void RenumberValno(VNInfo* VN);
void ReconstructLiveInterval(LiveInterval* LI);
VNInfo* PerformPHIConstruction(MachineBasicBlock::iterator use,
LiveInterval* LI,
DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 2> >& Defs,
DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 2> >& Uses,
DenseMap<MachineInstr*, VNInfo*>& NewVNs,
DenseMap<MachineBasicBlock*, VNInfo*>& Visited,
bool toplevel = false);
};
} // end anonymous namespace
char PreAllocSplitting::ID = 0;
static RegisterPass<PreAllocSplitting>
X("pre-alloc-splitting", "Pre-Register Allocation Live Interval Splitting");
const PassInfo *const llvm::PreAllocSplittingID = &X;
/// findNextEmptySlot - Find a gap after the given machine instruction in the
/// instruction index map. If there isn't one, return end().
MachineBasicBlock::iterator
PreAllocSplitting::findNextEmptySlot(MachineBasicBlock *MBB, MachineInstr *MI,
unsigned &SpotIndex) {
MachineBasicBlock::iterator MII = MI;
if (++MII != MBB->end()) {
unsigned Index = LIs->findGapBeforeInstr(LIs->getInstructionIndex(MII));
if (Index) {
SpotIndex = Index;
return MII;
}
}
return MBB->end();
}
/// findSpillPoint - Find a gap as far away from the given MI as possible that's suitable
/// for spilling the current live interval. The index must be before any
/// defs and uses of the live interval register in the mbb. Return begin() if
/// none is found.
MachineBasicBlock::iterator
PreAllocSplitting::findSpillPoint(MachineBasicBlock *MBB, MachineInstr *MI,
MachineInstr *DefMI,
SmallPtrSet<MachineInstr*, 4> &RefsInMBB,
unsigned &SpillIndex) {
MachineBasicBlock::iterator Pt = MBB->begin();
// Go top down if RefsInMBB is empty.
if (RefsInMBB.empty() && !DefMI) {
MachineBasicBlock::iterator MII = MBB->begin();
MachineBasicBlock::iterator EndPt = MI;
do {
++MII;
unsigned Index = LIs->getInstructionIndex(MII);
unsigned Gap = LIs->findGapBeforeInstr(Index);
if (Gap) {
Pt = MII;
SpillIndex = Gap;
break;
}
} while (MII != EndPt);
} else {
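// Otherwise scan backwards from MI toward DefMI (or the block start),
// remembering the last gap seen before hitting another reference to the
// register, so the spill lands as far from MI as possible.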
MachineBasicBlock::iterator MII = MI;
MachineBasicBlock::iterator EndPt = DefMI
? MachineBasicBlock::iterator(DefMI) : MBB->begin();
while (MII != EndPt && !RefsInMBB.count(MII)) {
unsigned Index = LIs->getInstructionIndex(MII);
if (LIs->hasGapBeforeInstr(Index)) {
Pt = MII;
SpillIndex = LIs->findGapBeforeInstr(Index, true);
}
--MII;
}
}
return Pt;
}
/// findRestorePoint - Find a gap in the instruction index map that's suitable
/// for restoring the current live interval value. The index must be before any
/// uses of the live interval register in the mbb. Return end() if none is
/// found.
MachineBasicBlock::iterator
PreAllocSplitting::findRestorePoint(MachineBasicBlock *MBB, MachineInstr *MI,
unsigned LastIdx,
SmallPtrSet<MachineInstr*, 4> &RefsInMBB,
unsigned &RestoreIndex) {
// FIXME: Allow spill to be inserted to the beginning of the mbb. Update mbb
// begin index accordingly.
MachineBasicBlock::iterator Pt = MBB->end();
unsigned EndIdx = LIs->getMBBEndIdx(MBB);
// Go bottom up if RefsInMBB is empty and the end of the mbb isn't beyond
// the last index in the live range.
if (RefsInMBB.empty() && LastIdx >= EndIdx) {
MachineBasicBlock::iterator MII = MBB->getFirstTerminator();
MachineBasicBlock::iterator EndPt = MI;
--MII;
do {
unsigned Index = LIs->getInstructionIndex(MII);
unsigned Gap = LIs->findGapBeforeInstr(Index);
if (Gap) {
Pt = MII;
RestoreIndex = Gap;
break;
}
--MII;
} while (MII != EndPt);
} else {
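// Otherwise scan forward from MI, keeping the last gap found before either
// the end of the live range (LastIdx) or the next reference in this block.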
MachineBasicBlock::iterator MII = MI;
MII = ++MII;
// FIXME: Limit the number of instructions to examine to reduce
// compile time?
while (MII != MBB->end()) {
unsigned Index = LIs->getInstructionIndex(MII);
if (Index > LastIdx)
break;
unsigned Gap = LIs->findGapBeforeInstr(Index);
if (Gap) {
Pt = MII;
RestoreIndex = Gap;
}
if (RefsInMBB.count(MII))
break;
++MII;
}
}
return Pt;
}
/// CreateSpillStackSlot - Create a stack slot for the live interval being
/// split. If the live interval was previously split, just reuse the same
/// slot.
int PreAllocSplitting::CreateSpillStackSlot(unsigned Reg,
const TargetRegisterClass *RC) {
int SS;
DenseMap<unsigned, int>::iterator I = IntervalSSMap.find(Reg);
if (I != IntervalSSMap.end()) {
SS = I->second;
} else {
SS = MFI->CreateStackObject(RC->getSize(), RC->getAlignment());
IntervalSSMap[Reg] = SS;
}
// Create live interval for stack slot.
CurrSLI = &LSs->getOrCreateInterval(SS);
if (CurrSLI->hasAtLeastOneValue())
CurrSValNo = CurrSLI->getValNumInfo(0);
else
CurrSValNo = CurrSLI->getNextValue(~0U, 0, LSs->getVNInfoAllocator());
return SS;
}
/// IsAvailableInStack - Return true if register is available in a split stack
/// slot at the specified index.
bool
PreAllocSplitting::IsAvailableInStack(MachineBasicBlock *DefMBB,
unsigned Reg, unsigned DefIndex,
unsigned RestoreIndex, unsigned &SpillIndex,
int& SS) const {
if (!DefMBB)
return false;
DenseMap<unsigned, int>::iterator I = IntervalSSMap.find(Reg);
if (I == IntervalSSMap.end())
return false;
DenseMap<unsigned, unsigned>::iterator II = Def2SpillMap.find(DefIndex);
if (II == Def2SpillMap.end())
return false;
// If last spill of def is in the same mbb as barrier mbb (where restore will
// be), make sure it's not below the intended restore index.
// FIXME: Undo the previous spill?
assert(LIs->getMBBFromIndex(II->second) == DefMBB);
if (DefMBB == BarrierMBB && II->second >= RestoreIndex)
return false;
SS = I->second;
SpillIndex = II->second;
return true;
}
/// UpdateSpillSlotInterval - Given the specified val# of the register live
/// interval being split, and the spill and restore indices, update the live
/// interval of the spill stack slot.
void
PreAllocSplitting::UpdateSpillSlotInterval(VNInfo *ValNo, unsigned SpillIndex,
unsigned RestoreIndex) {
assert(LIs->getMBBFromIndex(RestoreIndex) == BarrierMBB &&
"Expect restore in the barrier mbb");
MachineBasicBlock *MBB = LIs->getMBBFromIndex(SpillIndex);
if (MBB == BarrierMBB) {
// Intra-block spill + restore. We are done.
LiveRange SLR(SpillIndex, RestoreIndex, CurrSValNo);
CurrSLI->addRange(SLR);
return;
}
SmallPtrSet<MachineBasicBlock*, 4> Processed;
unsigned EndIdx = LIs->getMBBEndIdx(MBB);
LiveRange SLR(SpillIndex, EndIdx+1, CurrSValNo);
CurrSLI->addRange(SLR);
Processed.insert(MBB);
// Start from the spill mbb, figure out the extent of the spill slot's
// live interval.
SmallVector<MachineBasicBlock*, 4> WorkList;
const LiveRange *LR = CurrLI->getLiveRangeContaining(SpillIndex);
if (LR->end > EndIdx)
// If live range extends beyond end of mbb, add successors to work list.
for (MachineBasicBlock::succ_iterator SI = MBB->succ_begin(),
SE = MBB->succ_end(); SI != SE; ++SI)
WorkList.push_back(*SI);
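// Worklist walk over the blocks the original value reaches, extending the
// stack slot's live interval through each of them until the restore is seen.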
while (!WorkList.empty()) {
MachineBasicBlock *MBB = WorkList.back();
WorkList.pop_back();
if (Processed.count(MBB))
continue;
unsigned Idx = LIs->getMBBStartIdx(MBB);
LR = CurrLI->getLiveRangeContaining(Idx);
if (LR && LR->valno == ValNo) {
EndIdx = LIs->getMBBEndIdx(MBB);
if (Idx <= RestoreIndex && RestoreIndex < EndIdx) {
// Spill slot live interval stops at the restore.
LiveRange SLR(Idx, RestoreIndex, CurrSValNo);
CurrSLI->addRange(SLR);
} else if (LR->end > EndIdx) {
// Live range extends beyond end of mbb, process successors.
LiveRange SLR(Idx, EndIdx+1, CurrSValNo);
CurrSLI->addRange(SLR);
for (MachineBasicBlock::succ_iterator SI = MBB->succ_begin(),
SE = MBB->succ_end(); SI != SE; ++SI)
WorkList.push_back(*SI);
} else {
LiveRange SLR(Idx, LR->end, CurrSValNo);
CurrSLI->addRange(SLR);
}
Processed.insert(MBB);
}
}
}
/// UpdateRegisterInterval - Given the specified val# of the current live
/// interval being split, and the spill and restore indices, update the live
/// interval accordingly.
VNInfo*
PreAllocSplitting::UpdateRegisterInterval(VNInfo *ValNo, unsigned SpillIndex,
unsigned RestoreIndex) {
assert(LIs->getMBBFromIndex(RestoreIndex) == BarrierMBB &&
"Expect restore in the barrier mbb");
SmallVector<std::pair<unsigned,unsigned>, 4> Before;
SmallVector<std::pair<unsigned,unsigned>, 4> After;
SmallVector<unsigned, 4> BeforeKills;
SmallVector<unsigned, 4> AfterKills;
SmallPtrSet<const LiveRange*, 4> Processed;
// First, let's figure out which parts of the live interval are now defined
// by the restore, and which are defined by the original definition.
const LiveRange *LR = CurrLI->getLiveRangeContaining(RestoreIndex);
After.push_back(std::make_pair(RestoreIndex, LR->end));
if (CurrLI->isKill(ValNo, LR->end))
AfterKills.push_back(LR->end);
assert(LR->contains(SpillIndex));
if (SpillIndex > LR->start) {
Before.push_back(std::make_pair(LR->start, SpillIndex));
BeforeKills.push_back(SpillIndex);
}
Processed.insert(LR);
// Start from the restore mbb, figure out what parts of the live interval
// are defined by the restore.
SmallVector<MachineBasicBlock*, 4> WorkList;
MachineBasicBlock *MBB = BarrierMBB;
for (MachineBasicBlock::succ_iterator SI = MBB->succ_begin(),
SE = MBB->succ_end(); SI != SE; ++SI)
WorkList.push_back(*SI);
SmallPtrSet<MachineBasicBlock*, 4> ProcessedBlocks;
ProcessedBlocks.insert(MBB);
while (!WorkList.empty()) {
MBB = WorkList.back();
WorkList.pop_back();
unsigned Idx = LIs->getMBBStartIdx(MBB);
LR = CurrLI->getLiveRangeContaining(Idx);
if (LR && LR->valno == ValNo && !Processed.count(LR)) {
After.push_back(std::make_pair(LR->start, LR->end));
if (CurrLI->isKill(ValNo, LR->end))
AfterKills.push_back(LR->end);
Idx = LIs->getMBBEndIdx(MBB);
if (LR->end > Idx) {
// Live range extends beyond at least one mbb. Let's see what other
// mbbs it reaches.
LIs->findReachableMBBs(LR->start, LR->end, WorkList);
}
Processed.insert(LR);
}
ProcessedBlocks.insert(MBB);
if (LR)
for (MachineBasicBlock::succ_iterator SI = MBB->succ_begin(),
SE = MBB->succ_end(); SI != SE; ++SI)
if (!ProcessedBlocks.count(*SI))
WorkList.push_back(*SI);
}
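// Any remaining ranges of this val# were not reached from the restore, so
// they must still be defined by the original definition.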
for (LiveInterval::iterator I = CurrLI->begin(), E = CurrLI->end();
I != E; ++I) {
LiveRange *LR = I;
if (LR->valno == ValNo && !Processed.count(LR)) {
Before.push_back(std::make_pair(LR->start, LR->end));
if (CurrLI->isKill(ValNo, LR->end))
BeforeKills.push_back(LR->end);
}
}
// Now create new val#s to represent the live ranges defined by the old def
// and those defined by the restore.
unsigned AfterDef = ValNo->def;
MachineInstr *AfterCopy = ValNo->copy;
bool HasPHIKill = ValNo->hasPHIKill;
CurrLI->removeValNo(ValNo);
VNInfo *BValNo = (Before.empty())
? NULL
: CurrLI->getNextValue(AfterDef, AfterCopy, LIs->getVNInfoAllocator());
if (BValNo)
CurrLI->addKills(BValNo, BeforeKills);
VNInfo *AValNo = (After.empty())
? NULL
: CurrLI->getNextValue(RestoreIndex, 0, LIs->getVNInfoAllocator());
if (AValNo) {
AValNo->hasPHIKill = HasPHIKill;
CurrLI->addKills(AValNo, AfterKills);
}
for (unsigned i = 0, e = Before.size(); i != e; ++i) {
unsigned Start = Before[i].first;
unsigned End = Before[i].second;
CurrLI->addRange(LiveRange(Start, End, BValNo));
}
for (unsigned i = 0, e = After.size(); i != e; ++i) {
unsigned Start = After[i].first;
unsigned End = After[i].second;
CurrLI->addRange(LiveRange(Start, End, AValNo));
}
return AValNo;
}
/// ShrinkWrapToLastUse - There are uses of the current live interval in the
/// given block, shrink wrap the live interval to the last use (i.e. remove
/// from last use to the end of the mbb). In case mbb is where the barrier
/// is, remove from the last use to the barrier.
bool
PreAllocSplitting::ShrinkWrapToLastUse(MachineBasicBlock *MBB, VNInfo *ValNo,
SmallVector<MachineOperand*, 4> &Uses,
SmallPtrSet<MachineInstr*, 4> &UseMIs) {
MachineOperand *LastMO = 0;
MachineInstr *LastMI = 0;
if (MBB != BarrierMBB && Uses.size() == 1) {
// Single use, no need to traverse the block. We can't assume this for the
// barrier bb though since the use is probably below the barrier.
LastMO = Uses[0];
LastMI = LastMO->getParent();
} else {
MachineBasicBlock::iterator MEE = MBB->begin();
MachineBasicBlock::iterator MII;
if (MBB == BarrierMBB)
MII = Barrier;
else
MII = MBB->end();
while (MII != MEE) {
--MII;
MachineInstr *UseMI = &*MII;
if (!UseMIs.count(UseMI))
continue;
for (unsigned i = 0, e = UseMI->getNumOperands(); i != e; ++i) {
MachineOperand &MO = UseMI->getOperand(i);
if (MO.isReg() && MO.getReg() == CurrLI->reg) {
LastMO = &MO;
break;
}
}
LastMI = UseMI;
break;
}
}
// Cut off live range from last use (or beginning of the mbb if there
// are no uses in it) to the end of the mbb.
unsigned RangeStart, RangeEnd = LIs->getMBBEndIdx(MBB)+1;
if (LastMI) {
RangeStart = LIs->getUseIndex(LIs->getInstructionIndex(LastMI))+1;
assert(!LastMO->isKill() && "Last use already terminates the interval?");
LastMO->setIsKill();
} else {
assert(MBB == BarrierMBB);
RangeStart = LIs->getMBBStartIdx(MBB);
}
if (MBB == BarrierMBB)
RangeEnd = LIs->getUseIndex(BarrierIdx)+1;
CurrLI->removeRange(RangeStart, RangeEnd);
if (LastMI)
CurrLI->addKill(ValNo, RangeStart);
// Return true if the last use becomes a new kill.
return LastMI;
}
/// PerformPHIConstruction - From properly set up use and def lists, use a PHI
/// construction algorithm to compute the ranges and valnos for an interval.
VNInfo* PreAllocSplitting::PerformPHIConstruction(
MachineBasicBlock::iterator use,
LiveInterval* LI,
DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 2> >& Defs,
DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 2> >& Uses,
DenseMap<MachineInstr*, VNInfo*>& NewVNs,
DenseMap<MachineBasicBlock*, VNInfo*>& Visited,
bool toplevel) {
// Return memoized result if it's available.
if (Visited.count(use->getParent()))
return Visited[use->getParent()];
typedef DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 2> > RegMap;
// Check if our block contains any uses or defs.
bool ContainsDefs = Defs.count(use->getParent());
bool ContainsUses = Uses.count(use->getParent());
VNInfo* ret = 0;
// Enumerate the cases of use/def containing blocks.
if (!ContainsDefs && !ContainsUses) {
Fallback:
// NOTE: Because this is the fallback case from other cases, we do NOT
// assume that we are not at toplevel here.
// If there are no uses or defs between our starting point and the beginning
// of the block, then recursively perform phi construction on our predecessors.
MachineBasicBlock* MBB = use->getParent();
DenseMap<MachineBasicBlock*, VNInfo*> IncomingVNs;
for (MachineBasicBlock::pred_iterator PI = MBB->pred_begin(),
PE = MBB->pred_end(); PI != PE; ++PI) {
VNInfo* Incoming = PerformPHIConstruction((*PI)->end(), LI, Defs, Uses,
NewVNs, Visited, false);
IncomingVNs[*PI] = Incoming;
}
// If only one VNInfo came back from our predecessors, just use that one...
if (IncomingVNs.size() == 1) {
ret = IncomingVNs.begin()->second;
unsigned StartIndex = LIs->getMBBStartIdx(use->getParent());
unsigned EndIndex = 0;
if (toplevel) {
EndIndex = LIs->getInstructionIndex(use);
EndIndex = LiveIntervals::getUseIndex(EndIndex);
} else
EndIndex = LIs->getMBBEndIdx(use->getParent());
LI->addRange(LiveRange(StartIndex, EndIndex, ret));
} else {
// Otherwise, merge the incoming VNInfos with a phi join. Create a new
// VNInfo to represent the joined value.
for (DenseMap<MachineBasicBlock*, VNInfo*>::iterator I =
IncomingVNs.begin(), E = IncomingVNs.end(); I != E; ++I) {
I->second->hasPHIKill = true;
unsigned KillIndex = LIs->getMBBEndIdx(I->first);
LI->addKill(I->second, KillIndex);
}
unsigned StartIndex = LIs->getMBBStartIdx(use->getParent());
unsigned EndIndex = 0;
if (toplevel) {
EndIndex = LIs->getInstructionIndex(use);
EndIndex = LiveIntervals::getUseIndex(EndIndex);
} else
EndIndex = LIs->getMBBEndIdx(use->getParent());
ret = LI->getNextValue(StartIndex, /*FIXME*/ 0,
LIs->getVNInfoAllocator());
LI->addRange(LiveRange(StartIndex, EndIndex, ret));
}
} else if (ContainsDefs && !ContainsUses) {
SmallPtrSet<MachineInstr*, 2>& BlockDefs = Defs[use->getParent()];
// Search for the def in this block. If we don't find it before the
// instruction we care about, go to the fallback case. Note that that
// should never happen: this cannot be a toplevel block, so use should
// always be an end() iterator.
assert(use == use->getParent()->end() && "No use marked in toplevel block");
MachineBasicBlock::iterator walker = use;
--walker;
while (walker != use->getParent()->begin())
if (BlockDefs.count(walker)) {
break;
} else
--walker;
// Once we've found it, extend its VNInfo to our instruction.
unsigned DefIndex = LIs->getInstructionIndex(walker);
DefIndex = LiveIntervals::getDefIndex(DefIndex);
unsigned EndIndex = LIs->getMBBEndIdx(use->getParent());
ret = NewVNs[walker];
LI->addRange(LiveRange(DefIndex, EndIndex, ret));
} else if (!ContainsDefs && ContainsUses) {
SmallPtrSet<MachineInstr*, 2>& BlockUses = Uses[use->getParent()];
// Search for the use in this block that precedes the instruction we care
// about, going to the fallback case if we don't find it.
if (use == use->getParent()->begin())
goto Fallback;
MachineBasicBlock::iterator walker = use;
--walker;
bool found = false;
while (walker != use->getParent()->begin())
if (BlockUses.count(walker)) {
found = true;
break;
} else
--walker;
// Must check begin() too.
if (!found)
if (BlockUses.count(walker))
found = true;
else
goto Fallback;
unsigned UseIndex = LIs->getInstructionIndex(walker);
UseIndex = LiveIntervals::getUseIndex(UseIndex);
unsigned EndIndex = 0;
if (toplevel) {
EndIndex = LIs->getInstructionIndex(walker);
EndIndex = LiveIntervals::getUseIndex(EndIndex);
} else
EndIndex = LIs->getMBBEndIdx(use->getParent());
// Now, recursively phi construct the VNInfo for the use we found,
// and then extend it to include the instruction we care about
ret = PerformPHIConstruction(walker, LI, Defs, Uses,
NewVNs, Visited, false);
// FIXME: Need to set kills properly for inter-block stuff.
if (LI->isKill(ret, UseIndex)) LI->removeKill(ret, UseIndex);
if (toplevel)
LI->addKill(ret, EndIndex);
LI->addRange(LiveRange(UseIndex, EndIndex, ret));
} else if (ContainsDefs && ContainsUses){
SmallPtrSet<MachineInstr*, 2>& BlockDefs = Defs[use->getParent()];
SmallPtrSet<MachineInstr*, 2>& BlockUses = Uses[use->getParent()];
// This case is basically a merging of the two preceding cases, with the
// special note that checking for defs must take precedence over checking
// for uses, because of two-address instructions.
if (use == use->getParent()->begin())
goto Fallback;
MachineBasicBlock::iterator walker = use;
--walker;
bool foundDef = false;
bool foundUse = false;
while (walker != use->getParent()->begin())
if (BlockDefs.count(walker)) {
foundDef = true;
break;
} else if (BlockUses.count(walker)) {
foundUse = true;
break;
} else
--walker;
// Must check begin() too.
if (!foundDef && !foundUse)
if (BlockDefs.count(walker))
foundDef = true;
else if (BlockUses.count(walker))
foundUse = true;
else
goto Fallback;
unsigned StartIndex = LIs->getInstructionIndex(walker);
StartIndex = foundDef ? LiveIntervals::getDefIndex(StartIndex) :
LiveIntervals::getUseIndex(StartIndex);
unsigned EndIndex = 0;
if (toplevel) {
EndIndex = LIs->getInstructionIndex(walker);
EndIndex = LiveIntervals::getUseIndex(EndIndex);
} else
EndIndex = LIs->getMBBEndIdx(use->getParent());
if (foundDef)
ret = NewVNs[walker];
else
ret = PerformPHIConstruction(walker, LI, Defs, Uses,
NewVNs, Visited, false);
if (foundUse && LI->isKill(ret, StartIndex))
LI->removeKill(ret, StartIndex);
if (toplevel) {
LI->addKill(ret, EndIndex);
}
LI->addRange(LiveRange(StartIndex, EndIndex, ret));
}
// Memoize results so we don't have to recompute them.
if (!toplevel) Visited[use->getParent()] = ret;
return ret;
}
/// ReconstructLiveInterval - Recompute a live interval from scratch.
void PreAllocSplitting::ReconstructLiveInterval(LiveInterval* LI) {
BumpPtrAllocator& Alloc = LIs->getVNInfoAllocator();
// Clear the old ranges and valnos.
LI->clear();
// Cache the uses and defs of the register
typedef DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 2> > RegMap;
RegMap Defs, Uses;
// Keep track of the new VNs we're creating.
DenseMap<MachineInstr*, VNInfo*> NewVNs;
SmallPtrSet<VNInfo*, 2> PhiVNs;
// Cache defs, and create a new VNInfo for each def.
for (MachineRegisterInfo::def_iterator DI = MRI->def_begin(LI->reg),
DE = MRI->def_end(); DI != DE; ++DI) {
Defs[(*DI).getParent()].insert(&*DI);
unsigned DefIdx = LIs->getInstructionIndex(&*DI);
DefIdx = LiveIntervals::getDefIndex(DefIdx);
VNInfo* NewVN = LI->getNextValue(DefIdx, /*FIXME*/ 0, Alloc);
NewVNs[&*DI] = NewVN;
}
// Cache uses as a separate pass from actually processing them.
for (MachineRegisterInfo::use_iterator UI = MRI->use_begin(LI->reg),
UE = MRI->use_end(); UI != UE; ++UI)
Uses[(*UI).getParent()].insert(&*UI);
// Now, actually process every use and use a phi construction algorithm
// to walk from it to its reaching definitions, building VNInfos along
// the way.
for (MachineRegisterInfo::use_iterator UI = MRI->use_begin(LI->reg),
UE = MRI->use_end(); UI != UE; ++UI) {
DenseMap<MachineBasicBlock*, VNInfo*> Visited;
PerformPHIConstruction(&*UI, LI, Defs, Uses, NewVNs, Visited, true);
}
// Add ranges for dead defs
for (MachineRegisterInfo::def_iterator DI = MRI->def_begin(LI->reg),
DE = MRI->def_end(); DI != DE; ++DI) {
unsigned DefIdx = LIs->getInstructionIndex(&*DI);
DefIdx = LiveIntervals::getDefIndex(DefIdx);
unsigned UseIdx = LiveIntervals::getUseIndex(DefIdx);
if (LI->liveAt(DefIdx)) continue;
VNInfo* DeadVN = NewVNs[&*DI];
LI->addRange(LiveRange(DefIdx, UseIdx, DeadVN));
LI->addKill(DeadVN, DefIdx);
}
}
/// ShrinkWrapLiveInterval - Recursively traverse the predecessor
/// chain to find the new 'kills' and shrink wrap the live interval to the
/// new kill indices.
void
PreAllocSplitting::ShrinkWrapLiveInterval(VNInfo *ValNo, MachineBasicBlock *MBB,
MachineBasicBlock *SuccMBB, MachineBasicBlock *DefMBB,
SmallPtrSet<MachineBasicBlock*, 8> &Visited,
DenseMap<MachineBasicBlock*, SmallVector<MachineOperand*, 4> > &Uses,
DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 4> > &UseMIs,
SmallVector<MachineBasicBlock*, 4> &UseMBBs) {
if (Visited.count(MBB))
return;
// If live interval is live in another successor path, then we can't process
// this block. But we may be able to do so after all the successors have been
// processed.
if (MBB != BarrierMBB) {
for (MachineBasicBlock::succ_iterator SI = MBB->succ_begin(),
SE = MBB->succ_end(); SI != SE; ++SI) {
MachineBasicBlock *SMBB = *SI;
if (SMBB == SuccMBB)
continue;
if (CurrLI->liveAt(LIs->getMBBStartIdx(SMBB)))
return;
}
}
Visited.insert(MBB);
DenseMap<MachineBasicBlock*, SmallVector<MachineOperand*, 4> >::iterator
UMII = Uses.find(MBB);
if (UMII != Uses.end()) {
// At least one use in this mbb, let's look for the kill.
DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 4> >::iterator
UMII2 = UseMIs.find(MBB);
if (ShrinkWrapToLastUse(MBB, ValNo, UMII->second, UMII2->second))
// Found a kill, shrink wrapping of this path ends here.
return;
} else if (MBB == DefMBB) {
// There are no uses after the def.
MachineInstr *DefMI = LIs->getInstructionFromIndex(ValNo->def);
if (UseMBBs.empty()) {
// The only use must be below the barrier in the barrier block. It's safe to
// remove the def.
LIs->RemoveMachineInstrFromMaps(DefMI);
DefMI->eraseFromParent();
CurrLI->removeRange(ValNo->def, LIs->getMBBEndIdx(MBB)+1);
}
} else if (MBB == BarrierMBB) {
// Remove entire live range from start of mbb to barrier.
CurrLI->removeRange(LIs->getMBBStartIdx(MBB),
LIs->getUseIndex(BarrierIdx)+1);
} else {
// Remove entire live range of the mbb out of the live interval.
CurrLI->removeRange(LIs->getMBBStartIdx(MBB), LIs->getMBBEndIdx(MBB)+1);
}
if (MBB == DefMBB)
// Reached the def mbb, stop traversing this path further.
return;
// Traverse the paths up the predecessor chains further.
for (MachineBasicBlock::pred_iterator PI = MBB->pred_begin(),
PE = MBB->pred_end(); PI != PE; ++PI) {
MachineBasicBlock *Pred = *PI;
if (Pred == MBB)
continue;
if (Pred == DefMBB && ValNo->hasPHIKill)
// Pred is the def bb and the def reaches other val#s, we must
// allow the value to be live out of the bb.
continue;
if (!CurrLI->liveAt(LIs->getMBBEndIdx(Pred)-1))
return;
ShrinkWrapLiveInterval(ValNo, Pred, MBB, DefMBB, Visited,
Uses, UseMIs, UseMBBs);
}
return;
}
void PreAllocSplitting::RepairLiveInterval(LiveInterval* CurrLI,
VNInfo* ValNo,
MachineInstr* DefMI,
unsigned RestoreIdx) {
// Shrink wrap the live interval by walking up the CFG and finding the
// new kills.
// Now let's find all the uses of the val#.
DenseMap<MachineBasicBlock*, SmallVector<MachineOperand*, 4> > Uses;
DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 4> > UseMIs;
SmallPtrSet<MachineBasicBlock*, 4> Seen;
SmallVector<MachineBasicBlock*, 4> UseMBBs;
for (MachineRegisterInfo::use_iterator UI = MRI->use_begin(CurrLI->reg),
UE = MRI->use_end(); UI != UE; ++UI) {
MachineOperand &UseMO = UI.getOperand();
MachineInstr *UseMI = UseMO.getParent();
unsigned UseIdx = LIs->getInstructionIndex(UseMI);
LiveInterval::iterator ULR = CurrLI->FindLiveRangeContaining(UseIdx);
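// Only uses reached by the val# being repaired are of interest here.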
if (ULR->valno != ValNo)
continue;
MachineBasicBlock *UseMBB = UseMI->getParent();
// Remember which other mbbs use this val#.
if (Seen.insert(UseMBB) && UseMBB != BarrierMBB)
UseMBBs.push_back(UseMBB);
DenseMap<MachineBasicBlock*, SmallVector<MachineOperand*, 4> >::iterator
UMII = Uses.find(UseMBB);
if (UMII != Uses.end()) {
DenseMap<MachineBasicBlock*, SmallPtrSet<MachineInstr*, 4> >::iterator
UMII2 = UseMIs.find(UseMBB);
UMII->second.push_back(&UseMO);
UMII2->second.insert(UseMI);
} else {
SmallVector<MachineOperand*, 4> Ops;
Ops.push_back(&UseMO);
Uses.insert(std::make_pair(UseMBB, Ops));
SmallPtrSet<MachineInstr*, 4> MIs;
MIs.insert(UseMI);
UseMIs.insert(std::make_pair(UseMBB, MIs));
}
}
// Walk up the predecessor chains.
SmallPtrSet<MachineBasicBlock*, 8> Visited;
ShrinkWrapLiveInterval(ValNo, BarrierMBB, NULL, DefMI->getParent(), Visited,
Uses, UseMIs, UseMBBs);
// Remove live range from barrier to the restore. FIXME: Find a better
// point to re-start the live interval.
VNInfo* AfterValNo = UpdateRegisterInterval(ValNo,
LIs->getUseIndex(BarrierIdx)+1,
LIs->getDefIndex(RestoreIdx));
// Attempt to renumber the new valno into a new vreg.
RenumberValno(AfterValNo);
}
/// RenumberValno - Split the given valno out into a new vreg, allowing it to
/// be allocated to a different register. This function creates a new vreg,
/// copies the valno and its live ranges over to the new vreg's interval,
/// removes them from the old interval, and rewrites all uses and defs of
/// the original reg to the new vreg within those ranges.
void PreAllocSplitting::RenumberValno(VNInfo* VN) {
SmallVector<VNInfo*, 4> Stack;
SmallVector<VNInfo*, 4> VNsToCopy;
Stack.push_back(VN);
// Walk through and copy the valno we care about, and any other valnos
// that are two-address redefinitions of the one we care about. These
// will need to be rewritten as well. We also check for safety of the
// renumbering here, by making sure that none of the valno involved has