// VirtRegRewriter.cpp (LLVM) — fragment recovered from a blame-view scrape.
          // register we want to reload into might not actually be
          // available.  If this occurs, use the register indicated by the
          // reuser.
          if (ReusedOperands.hasReuses())
            DesignatedReg = ReusedOperands.GetRegForReload(VirtReg,
                                                           DesignatedReg, &MI, 
                               Spills, MaybeDeadStores, RegKills, KillOps, VRM);
          
          // If the mapped designated register is actually the physreg we have
          // incoming, we don't need to inserted a dead copy.
          if (DesignatedReg == PhysReg) {
            // If this stack slot value is already available, reuse it!
            if (ReuseSlot > VirtRegMap::MAX_STACK_SLOT)
              DEBUG(dbgs() << "Reusing RM#"
                    << ReuseSlot-VirtRegMap::MAX_STACK_SLOT-1);
              DEBUG(dbgs() << "Reusing SS#" << ReuseSlot);
            DEBUG(dbgs() << " from physreg " << TRI->getName(PhysReg)
                         << " for vreg" << VirtReg
                         << " instead of reloading into same physreg.\n");
            unsigned RReg = SubIdx ? TRI->getSubReg(PhysReg, SubIdx) : PhysReg;
            MI.getOperand(i).setReg(RReg);
            MI.getOperand(i).setSubReg(0);
            ReusedOperands.markClobbered(RReg);
            ++NumReused;
            continue;
          }
          
          const TargetRegisterClass* RC = RegInfo->getRegClass(VirtReg);
          RegInfo->setPhysRegUsed(DesignatedReg);
          ReusedOperands.markClobbered(DesignatedReg);

          // Back-schedule reloads and remats.
          MachineBasicBlock::iterator InsertLoc =
            ComputeReloadLoc(&MI, MBB.begin(), PhysReg, TRI, DoReMat,
                             SSorRMId, TII, MF);

          TII->copyRegToReg(MBB, InsertLoc, DesignatedReg, PhysReg, RC, RC);

          MachineInstr *CopyMI = prior(InsertLoc);
          CopyMI->setAsmPrinterFlag(MachineInstr::ReloadReuse);
          UpdateKills(*CopyMI, TRI, RegKills, KillOps);

          // This invalidates DesignatedReg.
          Spills.ClobberPhysReg(DesignatedReg);
          
          Spills.addAvailable(ReuseSlot, DesignatedReg);
          unsigned RReg =
            SubIdx ? TRI->getSubReg(DesignatedReg, SubIdx) : DesignatedReg;
          MI.getOperand(i).setReg(RReg);
          MI.getOperand(i).setSubReg(0);
          DEBUG(dbgs() << '\t' << *prior(MII));
          ++NumReused;
          continue;
        } // if (PhysReg)
        
        // Otherwise, reload it and remember that we have it.
        PhysReg = VRM.getPhys(VirtReg);
        assert(PhysReg && "Must map virtreg to physreg!");

        // Note that, if we reused a register for a previous operand, the
        // register we want to reload into might not actually be
        // available.  If this occurs, use the register indicated by the
        // reuser.
        if (ReusedOperands.hasReuses())
          PhysReg = ReusedOperands.GetRegForReload(VirtReg, PhysReg, &MI, 
                               Spills, MaybeDeadStores, RegKills, KillOps, VRM);
        
        RegInfo->setPhysRegUsed(PhysReg);
        ReusedOperands.markClobbered(PhysReg);
        if (AvoidReload)
          ++NumAvoided;
        else {
          // Back-schedule reloads and remats.
          MachineBasicBlock::iterator InsertLoc =
            ComputeReloadLoc(MII, MBB.begin(), PhysReg, TRI, DoReMat,
                             SSorRMId, TII, MF);

            ReMaterialize(MBB, InsertLoc, PhysReg, VirtReg, TII, TRI, VRM);
          } else {
            const TargetRegisterClass* RC = RegInfo->getRegClass(VirtReg);
            TII->loadRegFromStackSlot(MBB, InsertLoc, PhysReg, SSorRMId, RC);
            MachineInstr *LoadMI = prior(InsertLoc);
            VRM.addSpillSlotUse(SSorRMId, LoadMI);
            ++NumLoads;
            DistanceMap.insert(std::make_pair(LoadMI, Dist++));
          }
          // This invalidates PhysReg.
          Spills.ClobberPhysReg(PhysReg);

          // Any stores to this stack slot are not dead anymore.
          if (!DoReMat)
            MaybeDeadStores[SSorRMId] = NULL;
          Spills.addAvailable(SSorRMId, PhysReg);
          // Assumes this is the last use. IsKill will be unset if reg is reused
          // unless it's a two-address operand.
          if (!MI.isRegTiedToDefOperand(i) &&
              KilledMIRegs.count(VirtReg) == 0) {
            MI.getOperand(i).setIsKill();
            KilledMIRegs.insert(VirtReg);
          }

          UpdateKills(*prior(InsertLoc), TRI, RegKills, KillOps);
          DEBUG(dbgs() << '\t' << *prior(InsertLoc));
        }
        unsigned RReg = SubIdx ? TRI->getSubReg(PhysReg, SubIdx) : PhysReg;
        MI.getOperand(i).setReg(RReg);
        MI.getOperand(i).setSubReg(0);
      }

      // Ok - now we can remove stores that have been confirmed dead.
      for (unsigned j = 0, e = PotentialDeadStoreSlots.size(); j != e; ++j) {
        // This was the last use and the spilled value is still available
        // for reuse. That means the spill was unnecessary!
        int PDSSlot = PotentialDeadStoreSlots[j];
        MachineInstr* DeadStore = MaybeDeadStores[PDSSlot];
        if (DeadStore) {
          DEBUG(dbgs() << "Removed dead store:\t" << *DeadStore);
          InvalidateKills(*DeadStore, TRI, RegKills, KillOps);
          VRM.RemoveMachineInstrFromMaps(DeadStore);
          MBB.erase(DeadStore);
          MaybeDeadStores[PDSSlot] = NULL;
          ++NumDSE;
        }
      }


      DEBUG(dbgs() << '\t' << MI);


      // If we have folded references to memory operands, make sure we clear all
      // physical registers that may contain the value of the spilled virtual
      // register
      SmallSet<int, 2> FoldedSS;
      for (tie(I, End) = VRM.getFoldedVirts(&MI); I != End; ) {
        unsigned VirtReg = I->second.first;
        VirtRegMap::ModRef MR = I->second.second;
        DEBUG(dbgs() << "Folded vreg: " << VirtReg << "  MR: " << MR);

        // MI2VirtMap be can updated which invalidate the iterator.
        // Increment the iterator first.
        ++I;
        int SS = VRM.getStackSlot(VirtReg);
        if (SS == VirtRegMap::NO_STACK_SLOT)
          continue;
        FoldedSS.insert(SS);
        DEBUG(dbgs() << " - StackSlot: " << SS << "\n");
        
        // If this folded instruction is just a use, check to see if it's a
        // straight load from the virt reg slot.
        if ((MR & VirtRegMap::isRef) && !(MR & VirtRegMap::isMod)) {
          int FrameIdx;
          unsigned DestReg = TII->isLoadFromStackSlot(&MI, FrameIdx);
          if (DestReg && FrameIdx == SS) {
            // If this spill slot is available, turn it into a copy (or nothing)
            // instead of leaving it as a load!
            if (unsigned InReg = Spills.getSpillSlotOrReMatPhysReg(SS)) {
              DEBUG(dbgs() << "Promoted Load To Copy: " << MI);
              if (DestReg != InReg) {
                const TargetRegisterClass *RC = RegInfo->getRegClass(VirtReg);
                TII->copyRegToReg(MBB, &MI, DestReg, InReg, RC, RC);
                MachineOperand *DefMO = MI.findRegisterDefOperand(DestReg);
                unsigned SubIdx = DefMO->getSubReg();
                // Revisit the copy so we make sure to notice the effects of the
                // operation on the destreg (either needing to RA it if it's 
                // virtual or needing to clobber any values if it's physical).
                NextMII = &MI;
                --NextMII;  // backtrack to the copy.
                NextMII->setAsmPrinterFlag(MachineInstr::ReloadReuse);
                // Propagate the sub-register index over.
                if (SubIdx) {
                  DefMO = NextMII->findRegisterDefOperand(DestReg);
                  DefMO->setSubReg(SubIdx);
                }

                // Mark is killed.
                MachineOperand *KillOpnd = NextMII->findRegisterUseOperand(InReg);
                KillOpnd->setIsKill();

                BackTracked = true;
              } else {
                DEBUG(dbgs() << "Removing now-noop copy: " << MI);
                // Unset last kill since it's being reused.
                InvalidateKill(InReg, TRI, RegKills, KillOps);
              InvalidateKills(MI, TRI, RegKills, KillOps);
              VRM.RemoveMachineInstrFromMaps(&MI);
              MBB.erase(&MI);
              Erased = true;
              goto ProcessNextInst;
            }
          } else {
            unsigned PhysReg = Spills.getSpillSlotOrReMatPhysReg(SS);
            SmallVector<MachineInstr*, 4> NewMIs;
            if (PhysReg &&
                TII->unfoldMemoryOperand(MF, &MI, PhysReg, false, false, NewMIs)) {
              MBB.insert(MII, NewMIs[0]);
              InvalidateKills(MI, TRI, RegKills, KillOps);
              VRM.RemoveMachineInstrFromMaps(&MI);
              MBB.erase(&MI);
              Erased = true;
              --NextMII;  // backtrack to the unfolded instruction.
              BackTracked = true;
              goto ProcessNextInst;
            }
          }
        }

        // If this reference is not a use, any previous store is now dead.
        // Otherwise, the store to this stack slot is not dead anymore.
        MachineInstr* DeadStore = MaybeDeadStores[SS];
        if (DeadStore) {
          bool isDead = !(MR & VirtRegMap::isRef);
          MachineInstr *NewStore = NULL;
          if (MR & VirtRegMap::isModRef) {
            unsigned PhysReg = Spills.getSpillSlotOrReMatPhysReg(SS);
            SmallVector<MachineInstr*, 4> NewMIs;
            // We can reuse this physreg as long as we are allowed to clobber
            // the value and there isn't an earlier def that has already clobbered
            // the physreg.
            if (PhysReg &&
                !ReusedOperands.isClobbered(PhysReg) &&
                Spills.canClobberPhysReg(PhysReg) &&
                !TII->isStoreToStackSlot(&MI, SS)) { // Not profitable!
              MachineOperand *KillOpnd =
                DeadStore->findRegisterUseOperand(PhysReg, true);
              // Note, if the store is storing a sub-register, it's possible the
              // super-register is needed below.
              if (KillOpnd && !KillOpnd->getSubReg() &&
                  TII->unfoldMemoryOperand(MF, &MI, PhysReg, false, true,NewMIs)){
                MBB.insert(MII, NewMIs[0]);
                NewStore = NewMIs[1];
                MBB.insert(MII, NewStore);
                VRM.addSpillSlotUse(SS, NewStore);
                InvalidateKills(MI, TRI, RegKills, KillOps);
                VRM.RemoveMachineInstrFromMaps(&MI);
                MBB.erase(&MI);
                Erased = true;
                --NextMII;
                --NextMII;  // backtrack to the unfolded instruction.
                BackTracked = true;
                isDead = true;
                ++NumSUnfold;
              }
            }
          }

          if (isDead) {  // Previous store is dead.
            // If we get here, the store is dead, nuke it now.
            DEBUG(dbgs() << "Removed dead store:\t" << *DeadStore);
            InvalidateKills(*DeadStore, TRI, RegKills, KillOps);
            VRM.RemoveMachineInstrFromMaps(DeadStore);
            MBB.erase(DeadStore);
            if (!NewStore)
              ++NumDSE;
          }

          MaybeDeadStores[SS] = NULL;
          if (NewStore) {
            // Treat this store as a spill merged into a copy. That makes the
            // stack slot value available.
            VRM.virtFolded(VirtReg, NewStore, VirtRegMap::isMod);
            goto ProcessNextInst;
          }
        }

        // If the spill slot value is available, and this is a new definition of
        // the value, the value is not available anymore.
        if (MR & VirtRegMap::isMod) {
          // Notice that the value in this stack slot has been modified.
          Spills.ModifyStackSlotOrReMat(SS);
          
          // If this is *just* a mod of the value, check to see if this is just a
          // store to the spill slot (i.e. the spill got merged into the copy). If
          // so, realize that the vreg is available now, and add the store to the
          // MaybeDeadStore info.
          int StackSlot;
          if (!(MR & VirtRegMap::isRef)) {
            if (unsigned SrcReg = TII->isStoreToStackSlot(&MI, StackSlot)) {
              assert(TargetRegisterInfo::isPhysicalRegister(SrcReg) &&
                     "Src hasn't been allocated yet?");

              if (CommuteToFoldReload(MBB, MII, VirtReg, SrcReg, StackSlot,
                                      Spills, RegKills, KillOps, TRI, VRM)) {
                BackTracked = true;
                goto ProcessNextInst;
              }

              // Okay, this is certainly a store of SrcReg to [StackSlot].  Mark
              // this as a potentially dead store in case there is a subsequent
              // store into the stack slot without a read from it.
              MaybeDeadStores[StackSlot] = &MI;

              // If the stack slot value was previously available in some other
              // register, change it now.  Otherwise, make the register
              // available in PhysReg.
              Spills.addAvailable(StackSlot, SrcReg, MI.killsRegister(SrcReg));
            }
          }
        }
      }

      // Process all of the spilled defs.
      for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
        MachineOperand &MO = MI.getOperand(i);
        if (!(MO.isReg() && MO.getReg() && MO.isDef()))
          continue;

        unsigned VirtReg = MO.getReg();
        if (!TargetRegisterInfo::isVirtualRegister(VirtReg)) {
          // Check to see if this is a noop copy.  If so, eliminate the
          // instruction before considering the dest reg to be changed.
          // Also check if it's copying from an "undef", if so, we can't
          // eliminate this or else the undef marker is lost and it will
          // confuses the scavenger. This is extremely rare.
          if (TII->isMoveInstr(MI, Src, Dst, SrcSR, DstSR) && Src == Dst &&
              !MI.findRegisterUseOperand(Src)->isUndef()) {
            DEBUG(dbgs() << "Removing now-noop copy: " << MI);
            SmallVector<unsigned, 2> KillRegs;
            InvalidateKills(MI, TRI, RegKills, KillOps, &KillRegs);
            if (MO.isDead() && !KillRegs.empty()) {
              // Source register or an implicit super/sub-register use is killed.
              assert(KillRegs[0] == Dst ||
                     TRI->isSubRegister(KillRegs[0], Dst) ||
                     TRI->isSuperRegister(KillRegs[0], Dst));
              // Last def is now dead.
              TransferDeadness(&MBB, Dist, Src, RegKills, KillOps, VRM);
            }
            VRM.RemoveMachineInstrFromMaps(&MI);
            MBB.erase(&MI);
            Erased = true;
            Spills.disallowClobberPhysReg(VirtReg);
            goto ProcessNextInst;
          }
          // If it's not a no-op copy, it clobbers the value in the destreg.
          Spills.ClobberPhysReg(VirtReg);
          ReusedOperands.markClobbered(VirtReg);
   
          // Check to see if this instruction is a load from a stack slot into
          // a register.  If so, this provides the stack slot value in the reg.
          int FrameIdx;
          if (unsigned DestReg = TII->isLoadFromStackSlot(&MI, FrameIdx)) {
            assert(DestReg == VirtReg && "Unknown load situation!");

            // If it is a folded reference, then it's not safe to clobber.
            bool Folded = FoldedSS.count(FrameIdx);
            // Otherwise, if it wasn't available, remember that it is now!
            Spills.addAvailable(FrameIdx, DestReg, !Folded);
            goto ProcessNextInst;
          }
              
          continue;
        }

        unsigned SubIdx = MO.getSubReg();
        bool DoReMat = VRM.isReMaterialized(VirtReg);
        if (DoReMat)
          ReMatDefs.insert(&MI);

        // The only vregs left are stack slot definitions.
        int StackSlot = VRM.getStackSlot(VirtReg);
        const TargetRegisterClass *RC = RegInfo->getRegClass(VirtReg);

        // If this def is part of a two-address operand, make sure to execute
        // the store from the correct physical register.
        unsigned PhysReg;
        unsigned TiedOp;
        if (MI.isRegTiedToUseOperand(i, &TiedOp)) {
          PhysReg = MI.getOperand(TiedOp).getReg();
          if (SubIdx) {
            unsigned SuperReg = findSuperReg(RC, PhysReg, SubIdx, TRI);
            assert(SuperReg && TRI->getSubReg(SuperReg, SubIdx) == PhysReg &&
                   "Can't find corresponding super-register!");
            PhysReg = SuperReg;
          }
        } else {
          PhysReg = VRM.getPhys(VirtReg);
          if (ReusedOperands.isClobbered(PhysReg)) {
            // Another def has taken the assigned physreg. It must have been a
            // use&def which got it due to reuse. Undo the reuse!
            PhysReg = ReusedOperands.GetRegForReload(VirtReg, PhysReg, &MI, 
                               Spills, MaybeDeadStores, RegKills, KillOps, VRM);
          }
        }

        assert(PhysReg && "VR not assigned a physical register?");
        RegInfo->setPhysRegUsed(PhysReg);
        unsigned RReg = SubIdx ? TRI->getSubReg(PhysReg, SubIdx) : PhysReg;
        ReusedOperands.markClobbered(RReg);
        MI.getOperand(i).setReg(RReg);
        MI.getOperand(i).setSubReg(0);

        if (!MO.isDead()) {
          MachineInstr *&LastStore = MaybeDeadStores[StackSlot];
          SpillRegToStackSlot(MBB, MII, -1, PhysReg, StackSlot, RC, true,
                            LastStore, Spills, ReMatDefs, RegKills, KillOps, VRM);

          // Check to see if this is a noop copy.  If so, eliminate the
          // instruction before considering the dest reg to be changed.
          {
            unsigned Src, Dst, SrcSR, DstSR;
            if (TII->isMoveInstr(MI, Src, Dst, SrcSR, DstSR) && Src == Dst) {
              DEBUG(dbgs() << "Removing now-noop copy: " << MI);
              InvalidateKills(MI, TRI, RegKills, KillOps);
              VRM.RemoveMachineInstrFromMaps(&MI);
              MBB.erase(&MI);
              Erased = true;
              UpdateKills(*LastStore, TRI, RegKills, KillOps);
      // Delete dead instructions without side effects.
      if (!Erased && !BackTracked && isSafeToDelete(MI)) {
        InvalidateKills(MI, TRI, RegKills, KillOps);
        VRM.RemoveMachineInstrFromMaps(&MI);
        MBB.erase(&MI);
        Erased = true;
      }
      if (!Erased)
        DistanceMap.insert(std::make_pair(&MI, Dist++));
      if (!Erased && !BackTracked) {
        for (MachineBasicBlock::iterator II = &MI; II != NextMII; ++II)
          UpdateKills(*II, TRI, RegKills, KillOps);
llvm::VirtRegRewriter* llvm::createVirtRegRewriter() {
  switch (RewriterOpt) {
  default: llvm_unreachable("Unreachable!");
  case local:
    return new LocalRewriter();