CodeGenPrepare.cpp
    // It is possible for very late stage optimizations (such as SimplifyCFG)
    // to introduce PHI nodes too late to be cleaned up.  If we detect such a
    // trivial PHI, go ahead and zap it here.
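    // For example, a PHI whose incoming values are all the same, such as
    //   %p = phi i32 [ %x, %bb1 ], [ %x, %bb2 ]
    // simplifies to %x and can simply be removed.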
    if (Value *V = SimplifyInstruction(P)) {
      P->replaceAllUsesWith(V);
      P->eraseFromParent();
      ++NumPHIsElim;
      MadeChange = true;
    }
  } else if (CastInst *CI = dyn_cast<CastInst>(I)) {
    // If the source of the cast is a constant, then this should have
    // already been constant folded.  The only reason NOT to constant fold
    // it is if something (e.g. LSR) was careful to place the constant
    // evaluation in a block other than the one that uses it (e.g. to hoist
    // the address of globals out of a loop).  If this is the case, we don't
    // want to forward-subst the cast.
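    // (For instance, LSR may leave a cast of a global's address, such as a
    // ptrtoint of @g, in a loop preheader so the in-loop code reuses a single
    // register; folding the cast back into each use would undo that.)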
    if (isa<Constant>(CI->getOperand(0)))
      return false;

    bool Change = false;
    if (TLI) {
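      // If the target treats this cast as a no-op copy (source and
      // destination map to the same legal register type), sink copies of the
      // cast into the blocks that use it so that cross-block virtual-register
      // copies are not generated for it.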
      Change = OptimizeNoopCopyExpression(CI, *TLI);
      MadeChange |= Change;
    }

    if (!Change && (isa<ZExtInst>(I) || isa<SExtInst>(I))) {
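      // Try to move the ext next to a defining load so instruction selection
      // can form an extending load, and rewrite cross-block uses so that only
      // one of the extended/unextended values stays live across blocks.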
      MadeChange |= MoveExtToFormExtLoad(I);
      MadeChange |= OptimizeExtUses(I);
    }
  } else if (CmpInst *CI = dyn_cast<CmpInst>(I)) {
    MadeChange |= OptimizeCmpExpression(CI);
  } else if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
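    // Sink the address computation into this block if the target can fold it
    // into the load's addressing mode; SunkAddrs caches addresses that have
    // already been sunk so the computation is not duplicated.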
    if (TLI)
      MadeChange |= OptimizeMemoryInst(I, I->getOperand(0), LI->getType(),
                                       SunkAddrs);
  } else if (StoreInst *SI = dyn_cast<StoreInst>(I)) {
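    // Likewise for stores: operand 1 is the pointer operand and operand 0 is
    // the stored value, whose type describes the memory being accessed.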
    if (TLI)
      MadeChange |= OptimizeMemoryInst(I, SI->getOperand(1),
                                       SI->getOperand(0)->getType(),
                                       SunkAddrs);
  } else if (GetElementPtrInst *GEPI = dyn_cast<GetElementPtrInst>(I)) {
    if (GEPI->hasAllZeroIndices()) {
      // The GEP operand must be a pointer, and so must its result, so a GEP
      // with all-zero indices can be replaced with a BitCast.
      Instruction *NC = new BitCastInst(GEPI->getOperand(0), GEPI->getType(),
                                        GEPI->getName(), GEPI);
      GEPI->replaceAllUsesWith(NC);
      GEPI->eraseFromParent();
      ++NumGEPsElim;
      MadeChange = true;
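      // The replacement bitcast may itself be worth optimizing (e.g. sunk as
      // a no-op copy), so run it back through OptimizeInst.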
      OptimizeInst(NC);
    }
  } else if (CallInst *CI = dyn_cast<CallInst>(I)) {
    MadeChange |= OptimizeCallInst(CI);
  }

  return MadeChange;
}

// In this pass we look for GEP and cast instructions that are used
// across basic blocks and rewrite them to improve basic-block-at-a-time
// selection.
bool CodeGenPrepare::OptimizeBlock(BasicBlock &BB) {
  bool MadeChange = false;
  // Split all critical edges where the dest block has a PHI.
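  // Splitting such an edge gives the copy implied by the PHI its own block,
  // rather than forcing it into a predecessor that also branches elsewhere.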
  if (CriticalEdgeSplit) {
    TerminatorInst *BBTI = BB.getTerminator();
    if (BBTI->getNumSuccessors() > 1 && !isa<IndirectBrInst>(BBTI)) {
      for (unsigned i = 0, e = BBTI->getNumSuccessors(); i != e; ++i) {
        BasicBlock *SuccBB = BBTI->getSuccessor(i);
        if (isa<PHINode>(SuccBB->begin()) && isCriticalEdge(BBTI, i, true))
          SplitEdgeNicely(BBTI, i, BackEdges, this);
      }
    }
  }

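  // CurInstIterator is advanced before each instruction is processed so the
  // optimizations below can erase the current instruction or insert new ones
  // without invalidating the walk over the block.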
  CurInstIterator = BB.begin();
  for (BasicBlock::iterator E = BB.end(); CurInstIterator != E; ) {
    Instruction *I = CurInstIterator++;

    if (CallInst *CI = dyn_cast<CallInst>(I))
      MadeChange |= OptimizeCallInst(CI);
    else