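  // Sort the live ranges of LI that touch OldIdx into three buckets: ranges
  // read by the instruction go into Entering, def ranges that die within the
  // instruction go into Internal, and def ranges that live past it go into
  // Exiting.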
  void collectRanges(const MachineOperand &MO, LiveInterval *LI,
                     RangeSet &Entering, RangeSet &Internal, RangeSet &Exiting,
                     SlotIndex OldIdx) {
    if (MO.readsReg()) {
      LiveRange* LR = LI->getLiveRangeContaining(OldIdx);
      if (LR != 0)
        Entering.insert(std::make_pair(LI, LR));
    }
    if (MO.isDef()) {
      LiveRange* LR = LI->getLiveRangeContaining(OldIdx.getRegSlot());
      assert(LR != 0 && "No live range for def?");
      if (LR->end > OldIdx.getDeadSlot())
        Exiting.insert(std::make_pair(LI, LR));
      else
        Internal.insert(std::make_pair(LI, LR));
    }
  }

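  // Build a BundleRanges map from the collected range sets: entering ranges
  // become the bundle's uses, internal ranges become dead defs or
  // early-clobbers, and exiting ranges become the bundle's defs.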
  BundleRanges createBundleRanges(RangeSet& Entering,
                                  RangeSet& Internal,
                                  RangeSet& Exiting) {
    BundleRanges BR;

    for (RangeSet::iterator EI = Entering.begin(), EE = Entering.end();
         EI != EE; ++EI) {
      LiveInterval* LI = EI->first;
      LiveRange* LR = EI->second;
      BR[LI->reg].Use = LR;
    }

    for (RangeSet::iterator II = Internal.begin(), IE = Internal.end();
         II != IE; ++II) {
      LiveInterval* LI = II->first;
      LiveRange* LR = II->second;
      if (LR->end.isDead()) {
        BR[LI->reg].Dead = LR;
      } else {
        BR[LI->reg].EC = LR;
      }
    }

    for (RangeSet::iterator EI = Exiting.begin(), EE = Exiting.end();
         EI != EE; ++EI) {
      LiveInterval* LI = EI->first;
      LiveRange* LR = EI->second;
      BR[LI->reg].Def = LR;
    }

    return BR;
  }

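  // Move the kill flag for reg from the instruction at OldIdx to the
  // instruction at newKillIdx, if the old instruction carried one.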
  void moveKillFlags(unsigned reg, SlotIndex OldIdx, SlotIndex newKillIdx) {
    MachineInstr* OldKillMI = LIS.getInstructionFromIndex(OldIdx);
    if (!OldKillMI->killsRegister(reg))
      return; // Bail out if we don't have kill flags on the old register.
    MachineInstr* NewKillMI = LIS.getInstructionFromIndex(newKillIdx);
    assert(!NewKillMI->killsRegister(reg) &&
           "New kill instr is already a kill.");
    OldKillMI->clearRegisterKills(reg, &TRI);
    NewKillMI->addRegisterKilled(reg, &TRI);
  }

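  // Replace the RegMaskSlots entry for OldIdx with NewIdx. The list must
  // remain sorted, so regmask-carrying instructions (calls) cannot be moved
  // across one another.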
  void updateRegMaskSlots(SlotIndex OldIdx) {
    SmallVectorImpl<SlotIndex>::iterator RI =
      std::lower_bound(LIS.RegMaskSlots.begin(), LIS.RegMaskSlots.end(),
                       OldIdx);
    assert(*RI == OldIdx && "No RegMask at OldIdx.");
    *RI = NewIdx;
    assert(*prior(RI) < *RI && *RI < *next(RI) &&
           "RegSlots out of order. Did you move one call across another?");
  }

  // Return the last use of reg between NewIdx and OldIdx.
  SlotIndex findLastUseBefore(unsigned Reg, SlotIndex OldIdx) {
    SlotIndex LastUse = NewIdx;
    for (MachineRegisterInfo::use_nodbg_iterator
           UI = MRI.use_nodbg_begin(Reg),
           UE = MRI.use_nodbg_end();
         UI != UE; UI.skipInstruction()) {
      const MachineInstr* MI = &*UI;
      SlotIndex InstSlot = LIS.getSlotIndexes()->getInstructionIndex(MI);
      if (InstSlot > LastUse && InstSlot < OldIdx)
        LastUse = InstSlot;
    }
    return LastUse;
  }

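  // Update a range that enters the moved instruction when it moves up the
  // block: if the range was killed at OldIdx, shrink its end to the last
  // remaining use and move the kill flag from the moved instruction to it.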
  void moveEnteringUpFrom(SlotIndex OldIdx, IntRangePair& P) {
    LiveInterval* LI = P.first;
    LiveRange* LR = P.second;
    bool LiveThrough = LR->end > OldIdx.getRegSlot();
    if (LiveThrough)
      return;
    SlotIndex LastUse = findLastUseBefore(LI->reg, OldIdx);
    if (LastUse != NewIdx)
      moveKillFlags(LI->reg, NewIdx, LastUse);
    LR->end = LastUse.getRegSlot();
  }

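  // Update a range that enters the moved instruction when it moves down the
  // block: if the instruction moves past the range's current end, extend the
  // range to NewIdx and move the kill flag along with it.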
  void moveEnteringDownFrom(SlotIndex OldIdx, IntRangePair& P) {
    LiveInterval* LI = P.first;
    LiveRange* LR = P.second;
    // Extend the LiveRange if NewIdx is past the end.
    if (NewIdx > LR->end) {
      // Move kill flags if OldIdx was not originally the end
      // (otherwise LR->end points to an invalid slot).
      if (LR->end.getRegSlot() != OldIdx.getRegSlot()) {
        assert(LR->end > OldIdx && "LiveRange does not cover original slot");
        moveKillFlags(LI->reg, LR->end, NewIdx);
      }
      LR->end = NewIdx.getRegSlot();
    }
  }

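  // Update all entering ranges, dispatching on the direction of the move.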
  void moveAllEnteringFrom(SlotIndex OldIdx, RangeSet& Entering) {
    bool GoingUp = NewIdx < OldIdx;

    if (GoingUp) {
      for (RangeSet::iterator EI = Entering.begin(), EE = Entering.end();
           EI != EE; ++EI)
        moveEnteringUpFrom(OldIdx, *EI);
    } else {
      for (RangeSet::iterator EI = Entering.begin(), EE = Entering.end();
           EI != EE; ++EI)
        moveEnteringDownFrom(OldIdx, *EI);
    }
  }

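  // Re-home a range that lies entirely inside the instruction at OldIdx
  // (a dead or early-clobber def) so that it lies inside NewIdx instead.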
  void moveInternalFrom(SlotIndex OldIdx, IntRangePair& P) {
    LiveInterval* LI = P.first;
    LiveRange* LR = P.second;
    assert(OldIdx < LR->start && LR->start < OldIdx.getDeadSlot() &&
           LR->end <= OldIdx.getDeadSlot() &&
           "Range should be internal to OldIdx.");
    LiveRange Tmp(*LR);
    Tmp.start = NewIdx.getRegSlot(LR->start.isEarlyClobber());
    Tmp.valno->def = Tmp.start;
    Tmp.end = LR->end.isDead() ? NewIdx.getDeadSlot() : NewIdx.getRegSlot();
    LI->removeRange(*LR);
    LI->addRange(Tmp);
  }

  void moveAllInternalFrom(SlotIndex OldIdx, RangeSet& Internal) {
    for (RangeSet::iterator II = Internal.begin(), IE = Internal.end();
         II != IE; ++II)
      moveInternalFrom(OldIdx, *II);
  }

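  // Move the start of a range that is defined at OldIdx and live beyond it,
  // so that the value is defined at NewIdx instead.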
  void moveExitingFrom(SlotIndex OldIdx, IntRangePair& P) {
    LiveRange* LR = P.second;
    assert(OldIdx < LR->start && LR->start < OldIdx.getDeadSlot() &&
           "Range should start in OldIdx.");
    assert(LR->end > OldIdx.getDeadSlot() && "Range should exit OldIdx.");
    SlotIndex NewStart = NewIdx.getRegSlot(LR->start.isEarlyClobber());
    LR->start = NewStart;
    LR->valno->def = NewStart;
  }

  void moveAllExitingFrom(SlotIndex OldIdx, RangeSet& Exiting) {
    for (RangeSet::iterator EI = Exiting.begin(), EE = Exiting.end();
         EI != EE; ++EI)
      moveExitingFrom(OldIdx, *EI);
  }

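  // Merge a range entering the moved instruction into the bundle's ranges
  // when moving up: live-through ranges become bundle uses; otherwise the
  // range is shrunk to its last use or becomes a dead def in the bundle.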
  void moveEnteringUpFromInto(SlotIndex OldIdx, IntRangePair& P,
                              BundleRanges& BR) {
    LiveInterval* LI = P.first;
    LiveRange* LR = P.second;
    bool LiveThrough = LR->end > OldIdx.getRegSlot();
    if (LiveThrough) {
      assert((LR->start < NewIdx || BR[LI->reg].Def == LR) &&
             "Def in bundle should be def range.");
      assert((BR[LI->reg].Use == 0 || BR[LI->reg].Use == LR) &&
             "If bundle has use for this reg it should be LR.");
      BR[LI->reg].Use = LR;
      return;
    }

    SlotIndex LastUse = findLastUseBefore(LI->reg, OldIdx);
    moveKillFlags(LI->reg, OldIdx, LastUse);

    if (LR->start < NewIdx) {
      // Becoming a new entering range.
      assert(BR[LI->reg].Dead == 0 && BR[LI->reg].Def == 0 &&
             "Bundle shouldn't be re-defining reg mid-range.");
      assert((BR[LI->reg].Use == 0 || BR[LI->reg].Use == LR) &&
             "Bundle shouldn't have different use range for same reg.");
      LR->end = LastUse.getRegSlot();
      BR[LI->reg].Use = LR;
    } else {
      // Becoming a new Dead-def.
      assert(LR->start == NewIdx.getRegSlot(LR->start.isEarlyClobber()) &&
             "Live range starting at unexpected slot.");
      assert(BR[LI->reg].Def == LR && "Reg should have def range.");
      assert(BR[LI->reg].Dead == 0 &&
               "Can't have def and dead def of same reg in a bundle.");
      LR->end = LastUse.getDeadSlot();
      BR[LI->reg].Dead = BR[LI->reg].Def;
      BR[LI->reg].Def = 0;
    }
  }

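  // Merge a range entering the moved instruction into the bundle's ranges
  // when moving down, extending it to the bundle start if necessary.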
  void moveEnteringDownFromInto(SlotIndex OldIdx, IntRangePair& P,
                                BundleRanges& BR) {
    LiveInterval* LI = P.first;
    LiveRange* LR = P.second;
    if (NewIdx > LR->end) {
      // Range extended to bundle. Add to bundle uses.
      // Note: Currently adds kill flags to bundle start.
      assert(BR[LI->reg].Use == 0 &&
             "Bundle already has use range for reg.");
      moveKillFlags(LI->reg, LR->end, NewIdx);
      LR->end = NewIdx.getRegSlot();
      BR[LI->reg].Use = LR;
    } else {
      assert(BR[LI->reg].Use != 0 &&
             "Bundle should already have a use range for reg.");
    }
  }

  void moveAllEnteringFromInto(SlotIndex OldIdx, RangeSet& Entering,
                               BundleRanges& BR) {
    bool GoingUp = NewIdx < OldIdx;

    if (GoingUp) {
      for (RangeSet::iterator EI = Entering.begin(), EE = Entering.end();
           EI != EE; ++EI)
        moveEnteringUpFromInto(OldIdx, *EI, BR);
    } else {
      for (RangeSet::iterator EI = Entering.begin(), EE = Entering.end();
           EI != EE; ++EI)
        moveEnteringDownFromInto(OldIdx, *EI, BR);
    }
  }

  void moveInternalFromInto(SlotIndex OldIdx, IntRangePair& P,
                            BundleRanges& BR) {
    // TODO: Sane rules for moving ranges into bundles.
  }

  void moveAllInternalFromInto(SlotIndex OldIdx, RangeSet& Internal,
                               BundleRanges& BR) {
    for (RangeSet::iterator II = Internal.begin(), IE = Internal.end();
         II != IE; ++II)
      moveInternalFromInto(OldIdx, *II, BR);
  }

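  // Merge a range defined by the moved instruction into the bundle's ranges:
  // it either becomes the bundle's exiting def, or a dead def (or is removed
  // entirely) if it does not live past the bundle.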
  void moveExitingFromInto(SlotIndex OldIdx, IntRangePair& P,
                           BundleRanges& BR) {
    LiveInterval* LI = P.first;
    LiveRange* LR = P.second;

    assert(LR->start.isRegister() &&
           "Don't know how to merge exiting ECs into bundles yet.");

    if (LR->end > NewIdx.getDeadSlot()) {
      // This range is becoming an exiting range on the bundle.
      // If there was an old dead-def of this reg, delete it.
      if (BR[LI->reg].Dead != 0) {
        LI->removeRange(*BR[LI->reg].Dead);
        BR[LI->reg].Dead = 0;
      }
      assert(BR[LI->reg].Def == 0 &&
             "Can't have two defs for the same variable exiting a bundle.");
      LR->start = NewIdx.getRegSlot();
      LR->valno->def = LR->start;
      BR[LI->reg].Def = LR;
    } else {
      // This range is becoming internal to the bundle.
      assert(LR->end == NewIdx.getRegSlot() &&
             "Can't bundle def whose kill is before the bundle");
      if (BR[LI->reg].Dead || BR[LI->reg].Def) {
        // Already have a def for this. Just delete range.
        LI->removeRange(*LR);
      } else {
        // Make range dead, record.
        LR->end = NewIdx.getDeadSlot();
        BR[LI->reg].Dead = LR;
        assert(BR[LI->reg].Use == LR &&
               "Range becoming dead should currently be use.");
      }
      // In both cases the range is no longer a use on the bundle.
      BR[LI->reg].Use = 0;
    }
  }

  void moveAllExitingFromInto(SlotIndex OldIdx, RangeSet& Exiting,
                              BundleRanges& BR) {
    for (RangeSet::iterator EI = Exiting.begin(), EE = Exiting.end();
         EI != EE; ++EI)
      moveExitingFromInto(OldIdx, *EI, BR);
  }

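// handleMove - Update LiveIntervals after MI has been moved to a new
// position within its basic block: the slot index maps are updated and the
// affected live ranges are moved from the old index to the new one.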
void LiveIntervals::handleMove(MachineInstr* MI) {
  SlotIndex OldIndex = Indexes->getInstructionIndex(MI);
  Indexes->removeMachineInstrFromMaps(MI);
  SlotIndex NewIndex = MI->isInsideBundle() ?
                        Indexes->getInstructionIndex(MI) :
                        Indexes->insertMachineInstrInMaps(MI);
  assert(getMBBStartIdx(MI->getParent()) <= OldIndex &&
         OldIndex < getMBBEndIdx(MI->getParent()) &&
         "Cannot handle moves across basic block boundaries.");
  assert(!MI->isBundled() && "Can't handle bundled instructions yet.");
  HMEditor HME(*this, *MRI, *TRI, NewIndex);
  HME.moveAllRangesFrom(MI, OldIndex);
}

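// handleMoveIntoBundle - Update LiveIntervals after MI has been moved into
// the bundle headed by BundleStart; MI's ranges are merged into the ranges
// at the bundle's slot index.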
void LiveIntervals::handleMoveIntoBundle(MachineInstr* MI,
                                         MachineInstr* BundleStart) {
  SlotIndex NewIndex = Indexes->getInstructionIndex(BundleStart);
  HMEditor HME(*this, *MRI, *TRI, NewIndex);
  HME.moveAllRangesInto(MI, BundleStart);
}