#define DEBUG_TYPE "codegenprepare"

using namespace llvm::PatternMatch;
STATISTIC(NumBlocksElim, "Number of blocks eliminated");
STATISTIC(NumPHIsElim,   "Number of trivial PHIs eliminated");
STATISTIC(NumGEPsElim,   "Number of GEPs converted to casts");
STATISTIC(NumCmpUses, "Number of uses of Cmp expressions replaced with uses of "
                      "sunken Cmps");
STATISTIC(NumCastUses, "Number of uses of Cast expressions replaced with uses "
                       "of sunken Casts");
STATISTIC(NumMemoryInsts, "Number of memory instructions whose address "
                          "computations were sunk");
STATISTIC(NumExtsMoved,  "Number of [s|z]ext instructions combined with loads");
STATISTIC(NumExtUses,    "Number of uses of [s|z]ext instructions optimized");
STATISTIC(NumRetsDup,    "Number of return instructions duplicated");
STATISTIC(NumDbgValueMoved, "Number of debug value instructions moved");
STATISTIC(NumSelectsExpanded, "Number of selects turned into branches");
static cl::opt<bool> DisableBranchOpts(
  "disable-cgp-branch-opts", cl::Hidden, cl::init(false),
  cl::desc("Disable branch optimizations in CodeGenPrepare"));

static cl::opt<bool> DisableSelectToBranch(
  "disable-cgp-select2branch", cl::Hidden, cl::init(false),
  cl::desc("Disable select to branch conversion."));
namespace {
class CodeGenPrepare : public FunctionPass {
  // ...
  const char *getPassName() const { return "CodeGen Prepare"; }

  bool EliminateMostlyEmptyBlocks(Function &F);
  void EliminateMostlyEmptyBlock(BasicBlock *BB);
  bool OptimizeInlineAsmInst(CallInst *CS);
  bool OptimizeCallInst(CallInst *CI);
  bool DupRetToEnableTailCallOpts(BasicBlock *BB);
  // ...
};
}
135 "Optimize for code generation",
false,
false)
141 return new CodeGenPrepare(TM);
bool CodeGenPrepare::runOnFunction(Function &F) {
  bool EverMadeChange = false;

  if (TM) TLI = TM->getTargetLowering();
  TLInfo = &getAnalysis<TargetLibraryInfo>();
  DT = getAnalysisIfAvailable<DominatorTree>();

  // Bypass slow divides with faster narrow ones where the target profits.
  if (!OptSize && TLI && TLI->isSlowDivBypassed()) {
    const DenseMap<unsigned int, unsigned int> &BypassWidths =
       TLI->getBypassSlowDivWidths();
    // ...
  }

  // Eliminate blocks that contain only PHI nodes and an
  // unconditional branch.
  EverMadeChange |= EliminateMostlyEmptyBlocks(F);

  // Place llvm.dbg.value intrinsics next to the values they describe, so
  // instruction selection does not drop them.
  EverMadeChange |= PlaceDbgValues(F);

  bool MadeChange = true;
  while (MadeChange) {
    MadeChange = false;
    for (Function::iterator I = F.begin(), E = F.end(); I != E; ) {
      BasicBlock *BB = I++;
      MadeChange |= OptimizeBlock(*BB);
    }
    EverMadeChange |= MadeChange;
  }

  if (!DisableBranchOpts) {
    MadeChange = false;
    SmallPtrSet<BasicBlock*, 8> WorkList;
    for (Function::iterator BB = F.begin(), E = F.end(); BB != E; ++BB) {
      SmallVector<BasicBlock*, 2> Successors(succ_begin(BB), succ_end(BB));
      MadeChange |= ConstantFoldTerminator(BB, true);
      if (!MadeChange) continue;

      for (SmallVectorImpl<BasicBlock*>::iterator
             II = Successors.begin(), IE = Successors.end(); II != IE; ++II)
        if (pred_begin(*II) == pred_end(*II))
          WorkList.insert(*II);
    }

    // Delete the dead blocks and any of their dead successors.
    MadeChange |= !WorkList.empty();
    while (!WorkList.empty()) {
      BasicBlock *BB = *WorkList.begin();
      WorkList.erase(BB);
      SmallVector<BasicBlock*, 2> Successors(succ_begin(BB), succ_end(BB));

      DeleteDeadBlock(BB);

      for (SmallVectorImpl<BasicBlock*>::iterator
             II = Successors.begin(), IE = Successors.end(); II != IE; ++II)
        if (pred_begin(*II) == pred_end(*II))
          WorkList.insert(*II);
    }

    // Merge pairs of basic blocks with unconditional branches, connected by
    // a single edge.
    if (EverMadeChange || MadeChange)
      MadeChange |= EliminateFallThrough(F);

    EverMadeChange |= MadeChange;
  }

  if (ModifiedDT && DT)
    DT->DT->recalculate(F);

  return EverMadeChange;
}
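// A rough usage sketch (illustrative, not part of this file): targets create
// this pass just before instruction selection, e.g. with the legacy pass
// manager of this era:
//   PassManager PM;
//   PM.add(createCodeGenPreparePass(TM));
//   PM.run(M);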
/// EliminateFallThrough - Merge basic blocks which are connected by a single
/// fall-through edge: one block has a single successor pointing to the other,
/// which has a single predecessor.
bool CodeGenPrepare::EliminateFallThrough(Function &F) {
  bool Changed = false;
  // Scan all of the blocks in the function, except for the entry block.
  for (Function::iterator I = ++F.begin(), E = F.end(); I != E; ) {
    BasicBlock *BB = I++;
    // If the destination block has a single pred, then this is a trivial
    // edge, just collapse it.
    BasicBlock *SinglePred = BB->getSinglePredecessor();

    // Don't merge if BB's address is taken.
    if (!SinglePred || SinglePred == BB || BB->hasAddressTaken()) continue;

    BranchInst *Term = dyn_cast<BranchInst>(SinglePred->getTerminator());
    if (Term && !Term->isConditional()) {
      Changed = true;
      DEBUG(dbgs() << "To merge:\n"<< *SinglePred << "\n\n\n");
      // ...
    }
  }
  return Changed;
}
/// EliminateMostlyEmptyBlocks - Eliminate blocks that contain only PHI nodes,
/// debug info directives, and an unconditional branch.  Passes before isel
/// (e.g. LSR/loopsimplify) often split edges in ways that are non-optimal for
/// isel; start by eliminating these blocks so we can split them the way we
/// want them.
bool CodeGenPrepare::EliminateMostlyEmptyBlocks(Function &F) {
  bool MadeChange = false;
  // Note that this intentionally skips the entry block.
  for (Function::iterator I = ++F.begin(), E = F.end(); I != E; ) {
    BasicBlock *BB = I++;

    // If this block doesn't end with an uncond branch, ignore it.
    BranchInst *BI = dyn_cast<BranchInst>(BB->getTerminator());
    if (!BI || !BI->isUnconditional())
      continue;

    // If the instruction before the branch (skipping debug info) isn't a phi
    // node, then other stuff is happening here.
    BasicBlock::iterator BBI = BI;
    if (BBI != BB->begin()) {
      --BBI;
      while (isa<DbgInfoIntrinsic>(BBI)) {
        if (BBI == BB->begin())
          break;
        --BBI;
      }
      if (!isa<DbgInfoIntrinsic>(BBI) && !isa<PHINode>(BBI))
        continue;
    }

    // Do not break infinite loops.
    BasicBlock *DestBB = BI->getSuccessor(0);
    if (DestBB == BB)
      continue;

    if (!CanMergeBlocks(BB, DestBB))
      continue;

    EliminateMostlyEmptyBlock(BB);
    MadeChange = true;
  }
  return MadeChange;
}
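// A "mostly empty" block looks like (illustrative IR):
//   bb:  %p = phi i32 [ %a, %pred1 ], [ %b, %pred2 ]
//        br label %dest
// i.e. nothing but PHIs, debug intrinsics, and an unconditional branch.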
/// CanMergeBlocks - Return true if we can merge BB into DestBB if there is a
/// single unconditional branch between them, and BB contains no other non-phi
/// instructions.
bool CodeGenPrepare::CanMergeBlocks(const BasicBlock *BB,
                                    const BasicBlock *DestBB) const {
  // We only want to eliminate blocks whose phi nodes are used by phi nodes in
  // the successor.  If there are more complex conditions (e.g. preheaders),
  // don't mess around with them.
  BasicBlock::const_iterator BBI = BB->begin();
  while (const PHINode *PN = dyn_cast<PHINode>(BBI++)) {
    for (Value::const_use_iterator UI = PN->use_begin(), E = PN->use_end();
         UI != E; ++UI) {
      const Instruction *User = cast<Instruction>(*UI);
      if (User->getParent() != DestBB || !isa<PHINode>(User))
        return false;
      // If User is a PHINode inside DestBB, check its incoming value.  If the
      // incoming value is not from BB, this is a complex condition (e.g.
      // preheaders) we want to avoid here.
      if (const PHINode *UPN = dyn_cast<PHINode>(User))
        for (unsigned I = 0, E = UPN->getNumIncomingValues(); I != E; ++I) {
          Instruction *Insn = dyn_cast<Instruction>(UPN->getIncomingValue(I));
          if (Insn && Insn->getParent() == BB &&
              Insn->getParent() != UPN->getIncomingBlock(I))
            return false;
        }
    }
  }

  // If BB and DestBB contain any common predecessors, then the phi nodes in BB
  // and DestBB may need additional incoming values.  If they have any, we
  // can't merge them.
  const PHINode *DestBBPN = dyn_cast<PHINode>(DestBB->begin());
  if (!DestBBPN) return true;  // no conflict.

  // Collect the preds of BB.
  SmallPtrSet<const BasicBlock*, 16> BBPreds;
  if (const PHINode *BBPN = dyn_cast<PHINode>(BB->begin())) {
    // It is faster to get preds from a PHI than with pred_iterator.
    for (unsigned i = 0, e = BBPN->getNumIncomingValues(); i != e; ++i)
      BBPreds.insert(BBPN->getIncomingBlock(i));
  } else {
    BBPreds.insert(pred_begin(BB), pred_end(BB));
  }

  // Walk the preds of DestBB.
  for (unsigned i = 0, e = DestBBPN->getNumIncomingValues(); i != e; ++i) {
    BasicBlock *Pred = DestBBPN->getIncomingBlock(i);
    if (BBPreds.count(Pred)) {   // Common predecessor?
      BBI = DestBB->begin();
      while (const PHINode *PN = dyn_cast<PHINode>(BBI++)) {
        const Value *V1 = PN->getIncomingValueForBlock(Pred);
        const Value *V2 = PN->getIncomingValueForBlock(BB);

        // If V2 is a phi node in BB, look up what the mapped value will be.
        if (const PHINode *V2PN = dyn_cast<PHINode>(V2))
          if (V2PN->getParent() == BB)
            V2 = V2PN->getIncomingValueForBlock(Pred);

        // If there is a conflict, bail out.
        if (V1 != V2) return false;
      }
    }
  }

  return true;
}
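// The common-predecessor walk guards against cases where some block 'pred'
// branches to both BB and DestBB: after a merge, a PHI in DestBB would need
// two different incoming values for the same edge from 'pred', so the merge
// must be rejected unless both values agree.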
/// EliminateMostlyEmptyBlock - Eliminate a basic block that has only phi's
/// and an unconditional branch in it.
void CodeGenPrepare::EliminateMostlyEmptyBlock(BasicBlock *BB) {
  BranchInst *BI = cast<BranchInst>(BB->getTerminator());
  BasicBlock *DestBB = BI->getSuccessor(0);

  DEBUG(dbgs() << "MERGING MOSTLY EMPTY BLOCKS - BEFORE:\n" << *BB << *DestBB);

  // If the destination block has a single pred, then this is a trivial edge,
  // just collapse it.
  if (BasicBlock *SinglePred = DestBB->getSinglePredecessor()) {
    if (SinglePred != DestBB) {
      // ...
      DEBUG(dbgs() << "AFTER:\n" << *DestBB << "\n\n\n");
      return;
    }
  }

  // Otherwise, the dest block's PHI nodes may have incoming values from BB's
  // predecessors; update them.
  for (BasicBlock::iterator BBI = DestBB->begin(); isa<PHINode>(BBI); ++BBI) {
    PHINode *PN = cast<PHINode>(BBI);
    Value *InVal = PN->removeIncomingValue(BB, false);

    // If the incoming value is a PHI defined in BB itself, forward its
    // incoming values; otherwise add one entry per predecessor of BB.
    PHINode *InValPhi = dyn_cast<PHINode>(InVal);
    if (InValPhi && InValPhi->getParent() == BB) {
      for (unsigned i = 0, e = InValPhi->getNumIncomingValues(); i != e; ++i)
        PN->addIncoming(InValPhi->getIncomingValue(i),
                        InValPhi->getIncomingBlock(i));
    } else {
      if (PHINode *BBPN = dyn_cast<PHINode>(BB->begin())) {
        for (unsigned i = 0, e = BBPN->getNumIncomingValues(); i != e; ++i)
          PN->addIncoming(InVal, BBPN->getIncomingBlock(i));
      } else {
        for (pred_iterator PI = pred_begin(BB), E = pred_end(BB); PI != E; ++PI)
          PN->addIncoming(InVal, *PI);
      }
    }
  }

  // The PHIs are now updated; change everything that refers to BB to use
  // DestBB and remove BB.
  BB->replaceAllUsesWith(DestBB);
  if (DT && !ModifiedDT) {
    BasicBlock *BBIDom = DT->getNode(BB)->getIDom()->getBlock();
    BasicBlock *DestBBIDom = DT->getNode(DestBB)->getIDom()->getBlock();
    BasicBlock *NewIDom = DT->findNearestCommonDominator(BBIDom, DestBBIDom);
    DT->changeImmediateDominator(DestBB, NewIDom);
    DT->eraseNode(BB);
  }
  BB->eraseFromParent();
  ++NumBlocksElim;

  DEBUG(dbgs() << "AFTER:\n" << *DestBB << "\n\n\n");
}
/// OptimizeNoopCopyExpression - If the specified cast instruction is a noop
/// copy (e.g. casting from one pointer type to another, or i32->i16 where the
/// types are promoted the same way), sink it into user blocks to reduce the
/// number of virtual registers that must be created and coalesced.
static bool OptimizeNoopCopyExpression(CastInst *CI, const TargetLowering &TLI){
  EVT SrcVT = TLI.getValueType(CI->getOperand(0)->getType());
  EVT DstVT = TLI.getValueType(CI->getType());
  // ...
  // If this is an extension, it will be a zero or sign extension, which
  // isn't a noop.
  if (SrcVT.bitsLT(DstVT)) return false;
  // InsertedCasts - Only insert a cast in each block once.
  DenseMap<BasicBlock*, CastInst*> InsertedCasts;

  bool MadeChange = false;
  for (Value::use_iterator UI = CI->use_begin(), E = CI->use_end();
       UI != E; ) {
    Use &TheUse = UI.getUse();
    Instruction *User = cast<Instruction>(*UI);

    // Figure out which BB this cast is used in.  For PHI's this is the
    // appropriate predecessor block.
    BasicBlock *UserBB = User->getParent();
    if (PHINode *PN = dyn_cast<PHINode>(User)) {
      UserBB = PN->getIncomingBlock(UI);
    }

    // Preincrement use iterator so we don't invalidate it.
    ++UI;

    // If this user is in the same block as the cast, don't change the cast.
    if (UserBB == DefBB) continue;

    // If we have already inserted a cast into this block, use it.
    CastInst *&InsertedCast = InsertedCasts[UserBB];

    if (!InsertedCast) {
      BasicBlock::iterator InsertPt = UserBB->getFirstInsertionPt();
      InsertedCast =
        CastInst::Create(CI->getOpcode(), CI->getOperand(0), CI->getType(), "",
                         InsertPt);
      MadeChange = true;
    }

    // Replace a use of the cast with a use of the new cast.
    TheUse = InsertedCast;
    ++NumCastUses;
  }

  // If we removed all uses, nuke the cast.
  if (CI->use_empty()) {
    CI->eraseFromParent();
    MadeChange = true;
  }

  return MadeChange;
}

/// OptimizeCmpExpression - Sink the given CmpInst into user blocks to reduce
/// the number of virtual registers that must be created and coalesced.
static bool OptimizeCmpExpression(CmpInst *CI) {
  BasicBlock *DefBB = CI->getParent();

  /// InsertedCmps - Only insert a cmp in each block once.
  DenseMap<BasicBlock*, CmpInst*> InsertedCmps;

  bool MadeChange = false;
  for (Value::use_iterator UI = CI->use_begin(), E = CI->use_end();
       UI != E; ) {
    Use &TheUse = UI.getUse();
    Instruction *User = cast<Instruction>(*UI);

    // Preincrement use iterator so we don't invalidate it.
    ++UI;

    // Don't bother for PHI nodes.
    if (isa<PHINode>(User))
      continue;

    // Figure out which BB this cmp is used in.
    BasicBlock *UserBB = User->getParent();

    // If this user is in the same block as the cmp, don't change the cmp.
    if (UserBB == DefBB) continue;

    // If we have already inserted a cmp into this block, use it.
    CmpInst *&InsertedCmp = InsertedCmps[UserBB];

    if (!InsertedCmp) {
      BasicBlock::iterator InsertPt = UserBB->getFirstInsertionPt();
      InsertedCmp =
        CmpInst::Create(CI->getOpcode(), CI->getPredicate(),
                        CI->getOperand(0), CI->getOperand(1), "", InsertPt);
      MadeChange = true;
    }

    // Replace a use of the cmp with a use of the new cmp.
    TheUse = InsertedCmp;
    ++NumCmpUses;
  }

  // If we removed all uses, nuke the cmp.
  if (CI->use_empty())
    CI->eraseFromParent();

  return MadeChange;
}
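// Sinking the cmp next to its user lets instruction selection fold it into
// the branch, e.g. keeping (illustrative IR)
//   %c = icmp eq i32 %x, 0
//   br i1 %c, label %t, label %f
// together in one block instead of materializing %c far away in a register.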
namespace {
class CodeGenPrepareFortifiedLibCalls : public SimplifyFortifiedLibCalls {
protected:
  void replaceCall(Value *With) {
    CI->replaceAllUsesWith(With);
    CI->eraseFromParent();
  }
  bool isFoldable(unsigned SizeCIOp, unsigned, bool) const {
    if (ConstantInt *SizeCI =
          dyn_cast<ConstantInt>(CI->getArgOperand(SizeCIOp)))
      return SizeCI->isAllOnesValue();
    return false;
  }
};
} // end anonymous namespace
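// A fortified call such as __memcpy_chk(dst, src, n, (size_t)-1) carries an
// all-ones object size, meaning "size unknown, no check required"; isFoldable
// detects that constant so the call can be lowered to a plain memcpy.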
bool CodeGenPrepare::OptimizeCallInst(CallInst *CI) {
  BasicBlock *BB = CI->getParent();

  // Lower inline asm if we can.  If we found an inline asm expression, and if
  // the target knows how to lower it to normal LLVM code, do so now.
  if (TLI && isa<InlineAsm>(CI->getCalledValue())) {
    if (TLI->ExpandInlineAsm(CI)) {
      // Avoid invalidating the iterator.
      CurInstIterator = BB->begin();
      // Avoid processing instructions out of order, which could cause
      // reuse before a value is defined.
      SunkAddrs.clear();
      return true;
    }
    // Sink address computing for memory operands into the block.
    if (OptimizeInlineAsmInst(CI))
      return true;
  }

  // Lower all uses of llvm.objectsize.*
  IntrinsicInst *II = dyn_cast<IntrinsicInst>(CI);
  if (II && II->getIntrinsicID() == Intrinsic::objectsize) {
    bool Min = (cast<ConstantInt>(II->getArgOperand(1))->getZExtValue() == 1);
    Type *ReturnTy = CI->getType();
    Constant *RetVal = ConstantInt::get(ReturnTy, Min ? 0 : -1ULL);

    // Substituting this can cause recursive simplifications, which can
    // invalidate our iterator.  Use a WeakVH to hold onto it in case this
    // happens.
    WeakVH IterHandle(CurInstIterator);

    replaceAndRecursivelySimplify(CI, RetVal, TLI ? TLI->getDataLayout() : 0,
                                  TLInfo, ModifiedDT ? 0 : DT);

    // If the iterator instruction was recursively deleted, visit the rest of
    // the block from the start.
    if (IterHandle != CurInstIterator) {
      CurInstIterator = BB->begin();
      SunkAddrs.clear();
    }
    return true;
  }

  if (II) {
    SmallVector<Value*, 2> PtrOps;
    Type *AccessTy;
    if (TLI->GetAddrModeArguments(II, PtrOps, AccessTy))
      while (!PtrOps.empty())
        if (OptimizeMemoryInst(II, PtrOps.pop_back_val(), AccessTy))
          return true;
  }

  // From here on out we're working with named functions.
  if (CI->getCalledFunction() == 0) return false;

  // We'll need DataLayout from here on out.
  const DataLayout *TD = TLI ? TLI->getDataLayout() : 0;
  if (!TD) return false;

  // Lower all default uses of _chk calls.  This is similar to what
  // InstCombine does, but here we only lower calls whose object size is the
  // default "don't know" value.
  CodeGenPrepareFortifiedLibCalls Simplifier;
  return Simplifier.fold(CI, TD, TLInfo);
}
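// With no better static answer, llvm.objectsize folds to its conservative
// bound, e.g. (illustrative IR):
//   %sz = call i64 @llvm.objectsize.i64(i8* %p, i1 false)   ; --> -1 (max)
//   %sz = call i64 @llvm.objectsize.i64(i8* %p, i1 true)    ; --> 0  (min)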
/// DupRetToEnableTailCallOpts - Look for opportunities to duplicate return
/// instructions to the predecessor to enable tail call optimizations.  The
/// case it is currently looking for is:
///   %tmp0 = tail call i32 @f0()
///   br label %return
/// return:
///   %retval = phi i32 [ %tmp0, %bb0 ], ...
///   ret i32 %retval
bool CodeGenPrepare::DupRetToEnableTailCallOpts(BasicBlock *BB) {
  if (!TLI)
    return false;

  ReturnInst *RI = dyn_cast<ReturnInst>(BB->getTerminator());
  if (!RI)
    return false;

  PHINode *PN = 0;
  Value *V = RI->getReturnValue();
  // ...

  // Make sure there are no instructions between the PHI and return, or that
  // the return is the first instruction in the block.
  if (PN) {
    BasicBlock::iterator BI = BB->begin();
    do { ++BI; } while (isa<DbgInfoIntrinsic>(BI));
    if (&*BI != RI)
      return false;
  } else {
    BasicBlock::iterator BI = BB->begin();
    while (isa<DbgInfoIntrinsic>(BI)) ++BI;
    if (&*BI != RI)
      return false;
  }

  /// Only dup the ReturnInst if the CallInst is likely to be emitted as a
  /// tail call.
  SmallVector<CallInst*, 4> TailCalls;
  if (PN) {
    for (unsigned I = 0, E = PN->getNumIncomingValues(); I != E; ++I) {
      CallInst *CI = dyn_cast<CallInst>(PN->getIncomingValue(I));
      // Make sure the phi value is indeed produced by the tail call.
      if (CI && CI->hasOneUse() && CI->getParent() == PN->getIncomingBlock(I) &&
          TLI->mayBeEmittedAsTailCall(CI))
        TailCalls.push_back(CI);
    }
  } else {
    SmallPtrSet<BasicBlock*, 4> VisitedBBs;
    for (pred_iterator PI = pred_begin(BB), PE = pred_end(BB); PI != PE; ++PI) {
      if (!VisitedBBs.insert(*PI))
        continue;

      BasicBlock::InstListType &InstList = (*PI)->getInstList();
      BasicBlock::InstListType::reverse_iterator RI = InstList.rbegin();
      BasicBlock::InstListType::reverse_iterator RE = InstList.rend();
      do { ++RI; } while (RI != RE && isa<DbgInfoIntrinsic>(&*RI));
      if (RI == RE)
        continue;

      CallInst *CI = dyn_cast<CallInst>(&*RI);
      if (CI && CI->use_empty() && TLI->mayBeEmittedAsTailCall(CI))
        TailCalls.push_back(CI);
    }
  }

  bool Changed = false;
  for (unsigned i = 0, e = TailCalls.size(); i != e; ++i) {
    CallInst *CI = TailCalls[i];
    CallSite CS(CI);

    // Conservatively require the attributes of the call to match those of the
    // return.  Ignore noalias because it doesn't affect the call sequence.
    AttributeSet CalleeAttrs = CS.getAttributes();
    if (AttrBuilder(CalleeAttrs, AttributeSet::ReturnIndex).
          removeAttribute(Attribute::NoAlias) !=
        AttrBuilder(CallerAttrs, AttributeSet::ReturnIndex).
          removeAttribute(Attribute::NoAlias))
      continue;

    // ...
    ModifiedDT = Changed = true;
    ++NumRetsDup;
  }
  // ...
  return Changed;
}
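// After duplication, a predecessor that ended with
//   %tmp0 = tail call i32 @f0()
//   br label %return
// ends with "ret i32 %tmp0" directly, so the backend can emit a genuine tail
// call (a jump) instead of call-then-ret through the shared return block.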
/// ExtAddrMode - This is an extended version of TargetLowering::AddrMode
/// which holds actual Value*'s for register values.
struct ExtAddrMode : public TargetLowering::AddrMode {
  Value *BaseReg;
  Value *ScaledReg;
  ExtAddrMode() : BaseReg(0), ScaledReg(0) {}
  void print(raw_ostream &OS) const;
  void dump() const;

  bool operator==(const ExtAddrMode& O) const {
    return (BaseReg == O.BaseReg) && (ScaledReg == O.ScaledReg) &&
           (BaseGV == O.BaseGV) && (BaseOffs == O.BaseOffs) &&
           (HasBaseReg == O.HasBaseReg) && (Scale == O.Scale);
  }
};

void ExtAddrMode::print(raw_ostream &OS) const {
  bool NeedPlus = false;
  OS << "[";
  if (BaseGV) {
    OS << (NeedPlus ? " + " : "")
       << "GV:";
    WriteAsOperand(OS, BaseGV, /*PrintType=*/false);
    NeedPlus = true;
  }

  if (BaseOffs)
    OS << (NeedPlus ? " + " : "") << BaseOffs, NeedPlus = true;

  if (BaseReg) {
    OS << (NeedPlus ? " + " : "")
       << "Base:";
    WriteAsOperand(OS, BaseReg, /*PrintType=*/false);
    NeedPlus = true;
  }
  if (Scale) {
    OS << (NeedPlus ? " + " : "")
       << Scale << "*";
    WriteAsOperand(OS, ScaledReg, /*PrintType=*/false);
  }

  OS << ']';
}

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
void ExtAddrMode::dump() const {
  print(dbgs());
  dbgs() << '\n';
}
#endif
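// An ExtAddrMode describes an address of the form
//   BaseGV + BaseOffs + BaseReg + Scale*ScaledReg
// matching the [base + index*scale + disp] modes that common hardware
// addressing supports.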
/// AddressingModeMatcher - This class exposes a single public method, which is
/// used to construct a "maximal munch" of the addressing mode for the target
/// for a given memory instruction.
class AddressingModeMatcher {
  SmallVectorImpl<Instruction*> &AddrModeInsts;
  const TargetLowering &TLI;

  /// AccessTy/MemoryInst - This is the type and the memory instruction that
  /// we're matching the address for.
  Type *AccessTy;
  Instruction *MemoryInst;

  /// AddrMode - This is the addressing mode that we're building up.
  ExtAddrMode &AddrMode;

  /// IgnoreProfitability - When true,
  /// IsProfitableToFoldIntoAddressingMode always returns true.
  bool IgnoreProfitability;

  AddressingModeMatcher(SmallVectorImpl<Instruction*> &AMI,
                        const TargetLowering &T, Type *AT, Instruction *MI,
                        ExtAddrMode &AM)
    : AddrModeInsts(AMI), TLI(T), AccessTy(AT), MemoryInst(MI), AddrMode(AM) {
    IgnoreProfitability = false;
  }
public:

  /// Match - Find the maximal addressing mode that V can be folded into.
  static ExtAddrMode Match(Value *V, Type *AccessTy,
                           Instruction *MemoryInst,
                           SmallVectorImpl<Instruction*> &AddrModeInsts,
                           const TargetLowering &TLI) {
    ExtAddrMode Result;

    bool Success =
      AddressingModeMatcher(AddrModeInsts, TLI, AccessTy,
                            MemoryInst, Result).MatchAddr(V, 0);
    (void)Success; assert(Success && "Couldn't select *anything*?");
    return Result;
  }
private:
  bool MatchScaledValue(Value *ScaleReg, int64_t Scale, unsigned Depth);
  bool MatchAddr(Value *V, unsigned Depth);
  bool MatchOperationAddr(User *Operation, unsigned Opcode, unsigned Depth);
  bool IsProfitableToFoldIntoAddressingMode(Instruction *I,
                                            ExtAddrMode &AMBefore,
                                            ExtAddrMode &AMAfter);
  bool ValueAlreadyLiveAtInst(Value *Val, Value *KnownLive1, Value *KnownLive2);
};
/// MatchScaledValue - Try adding ScaleReg*Scale to the current addressing
/// mode.  Return true and update AddrMode if this addr mode is legal for the
/// target, false if not.
bool AddressingModeMatcher::MatchScaledValue(Value *ScaleReg, int64_t Scale,
                                             unsigned Depth) {
  // If Scale is 1, then this is the same as adding ScaleReg to the addressing
  // mode.  Just process that directly.
  if (Scale == 1)
    return MatchAddr(ScaleReg, Depth);

  // ...
  ExtAddrMode TestAddrMode = AddrMode;

  // Add scale to turn X*4+X*3 -> X*7.  This could also do things like
  // [A+B + A*7] -> [B+A*8].
  TestAddrMode.Scale += Scale;
  TestAddrMode.ScaledReg = ScaleReg;

  // If the new address isn't legal, bail out.
  if (!TLI.isLegalAddressingMode(TestAddrMode, AccessTy))
    return false;

  // It's possible for this to be detected as ScaleReg = X + C, where we can
  // fold the constant into the displacement: match (add x, c).
  ConstantInt *CI = 0; Value *AddLHS = 0;
  if (isa<Instruction>(ScaleReg) &&   // not a constant expr.
      match(ScaleReg, m_Add(m_Value(AddLHS), m_ConstantInt(CI)))) {
    TestAddrMode.ScaledReg = AddLHS;
    TestAddrMode.BaseOffs += CI->getSExtValue()*TestAddrMode.Scale;

    // If this addressing mode is legal, commit it and remember that we folded
    // this instruction.
    if (TLI.isLegalAddressingMode(TestAddrMode, AccessTy)) {
      AddrModeInsts.push_back(cast<Instruction>(ScaleReg));
      AddrMode = TestAddrMode;
      return true;
    }
  }

  // Otherwise, not (x+c)*scale; just return what we have.
  AddrMode = TestAddrMode;
  return true;
}
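// Example: with ScaleReg = (add i64 %x, 7) and Scale = 4, the matcher turns
//   base + 4*(%x + 7)  into  base + 4*%x + 28
// folding the constant into BaseOffs, provided the target accepts the mode.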
/// MightBeFoldableInst - Returns true if an addressing computation involving
/// I might be folded into a load/store accessing it.
static bool MightBeFoldableInst(Instruction *I) {
  switch (I->getOpcode()) {
  case Instruction::BitCast:
    // Don't touch identity bitcasts.
    if (I->getType() == I->getOperand(0)->getType())
      return false;
    return I->getType()->isPointerTy() || I->getType()->isIntegerTy();
  case Instruction::PtrToInt:
    // PtrToInt is always a noop, as we know that the int type is pointer
    // sized.
    return true;
  case Instruction::IntToPtr:
    // We know the input is intptr_t, so this is foldable.
    return true;
  case Instruction::Add:
    return true;
  case Instruction::Mul:
  case Instruction::Shl:
    // Can only handle X*C and X << C.
    return isa<ConstantInt>(I->getOperand(1));
  case Instruction::GetElementPtr:
    return true;
  default:
    return false;
  }
}
/// MatchOperationAddr - Given an instruction or constant expr, see if we can
/// fold the operation into the addressing mode.  If so, update the addressing
/// mode and return true, otherwise return false without modifying AddrMode.
bool AddressingModeMatcher::MatchOperationAddr(User *AddrInst, unsigned Opcode,
                                               unsigned Depth) {
  // Avoid exponential behavior on extremely deep expression trees.
  if (Depth >= 5) return false;

  switch (Opcode) {
  case Instruction::PtrToInt:
    // PtrToInt is always a noop, as we know that the int type is pointer
    // sized.
    return MatchAddr(AddrInst->getOperand(0), Depth);
  case Instruction::IntToPtr:
    // This inttoptr is a no-op if the integer type is pointer sized.
    if (TLI.getValueType(AddrInst->getOperand(0)->getType()) ==
        TLI.getPointerTy())
      return MatchAddr(AddrInst->getOperand(0), Depth);
    return false;
  case Instruction::BitCast:
    // BitCast is always a noop, and we can handle it as long as it is
    // int->int or pointer->pointer (we don't want int<->fp or something),
    // and is not an identity bitcast.
    if ((AddrInst->getOperand(0)->getType()->isPointerTy() ||
         AddrInst->getOperand(0)->getType()->isIntegerTy()) &&
        AddrInst->getOperand(0)->getType() != AddrInst->getType())
      return MatchAddr(AddrInst->getOperand(0), Depth);
    return false;
  case Instruction::Add: {
    // Check to see if we can merge in the RHS then the LHS.  If so, we win.
    ExtAddrMode BackupAddrMode = AddrMode;
    unsigned OldSize = AddrModeInsts.size();
    if (MatchAddr(AddrInst->getOperand(1), Depth+1) &&
        MatchAddr(AddrInst->getOperand(0), Depth+1))
      return true;

    // Restore the old addr mode info.
    AddrMode = BackupAddrMode;
    AddrModeInsts.resize(OldSize);

    // Otherwise this was over-aggressive.  Try merging in the LHS then the
    // RHS.
    if (MatchAddr(AddrInst->getOperand(0), Depth+1) &&
        MatchAddr(AddrInst->getOperand(1), Depth+1))
      return true;

    // Otherwise we definitely can't merge the ADD in.
    AddrMode = BackupAddrMode;
    AddrModeInsts.resize(OldSize);
    break;
  }
  case Instruction::Mul:
  case Instruction::Shl: {
    // Can only handle X*C and X << C.
    ConstantInt *RHS = dyn_cast<ConstantInt>(AddrInst->getOperand(1));
    if (!RHS) return false;
    int64_t Scale = RHS->getSExtValue();
    if (Opcode == Instruction::Shl)
      Scale = 1LL << Scale;

    return MatchScaledValue(AddrInst->getOperand(0), Scale, Depth);
  }
  case Instruction::GetElementPtr: {
    // Scan the GEP.  We check it if it contains constant offsets and at most
    // one variable offset.
    int VariableOperand = -1;
    unsigned VariableScale = 0;

    int64_t ConstantOffset = 0;
    const DataLayout *TD = TLI.getDataLayout();
    gep_type_iterator GTI = gep_type_begin(AddrInst);
    for (unsigned i = 1, e = AddrInst->getNumOperands(); i != e; ++i, ++GTI) {
      if (StructType *STy = dyn_cast<StructType>(*GTI)) {
        const StructLayout *SL = TD->getStructLayout(STy);
        unsigned Idx =
          cast<ConstantInt>(AddrInst->getOperand(i))->getZExtValue();
        ConstantOffset += SL->getElementOffset(Idx);
      } else {
        uint64_t TypeSize = TD->getTypeAllocSize(GTI.getIndexedType());
        if (ConstantInt *CI = dyn_cast<ConstantInt>(AddrInst->getOperand(i))) {
          ConstantOffset += CI->getSExtValue()*TypeSize;
        } else if (TypeSize) {  // Scales of zero don't do anything.
          // We only allow one variable index at the moment.
          if (VariableOperand != -1)
            return false;

          // Remember the variable index.
          VariableOperand = i;
          VariableScale = TypeSize;
        }
      }
    }

    // A common case is for the GEP to only do a constant offset.  In this
    // case, just add it to the displacement field and check validity.
    if (VariableOperand == -1) {
      AddrMode.BaseOffs += ConstantOffset;
      if (ConstantOffset == 0 || TLI.isLegalAddressingMode(AddrMode, AccessTy)){
        // Check to see if we can fold the base pointer in too.
        if (MatchAddr(AddrInst->getOperand(0), Depth+1))
          return true;
      }
      AddrMode.BaseOffs -= ConstantOffset;
      return false;
    }

    // Save the valid addressing mode in case we can't match.
    ExtAddrMode BackupAddrMode = AddrMode;
    unsigned OldSize = AddrModeInsts.size();

    // See if the scale and offset amount is valid for this target.
    AddrMode.BaseOffs += ConstantOffset;

    // Match the base operand of the GEP.
    if (!MatchAddr(AddrInst->getOperand(0), Depth+1)) {
      // If it couldn't be matched, just stuff the value in a register.
      if (AddrMode.HasBaseReg) {
        AddrMode = BackupAddrMode;
        AddrModeInsts.resize(OldSize);
        return false;
      }
      AddrMode.HasBaseReg = true;
      AddrMode.BaseReg = AddrInst->getOperand(0);
    }

    // Match the remaining variable portion of the GEP.
    if (!MatchScaledValue(AddrInst->getOperand(VariableOperand), VariableScale,
                          Depth)) {
      // If it couldn't be matched, try stuffing the base into a register
      // instead of matching it, and retrying the match of the scale.
      AddrMode = BackupAddrMode;
      AddrModeInsts.resize(OldSize);
      if (AddrMode.HasBaseReg)
        return false;
      AddrMode.HasBaseReg = true;
      AddrMode.BaseReg = AddrInst->getOperand(0);
      AddrMode.BaseOffs += ConstantOffset;
      if (!MatchScaledValue(AddrInst->getOperand(VariableOperand),
                            VariableScale, Depth)) {
        // If even that didn't work, bail.
        AddrMode = BackupAddrMode;
        AddrModeInsts.resize(OldSize);
        return false;
      }
    }

    return true;
  }
  }
  return false;
}
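// Example (illustrative): for 'getelementptr inbounds i32* %buf, i64 %i' with
// 4-byte elements, the scan produces ConstantOffset = 0 and one variable
// index %i with VariableScale = 4, so the GEP matches as %buf + 4*%i.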
/// MatchAddr - If we can, try to add the value of 'Addr' into the current
/// addressing mode.  If Addr can't be added, return false.
bool AddressingModeMatcher::MatchAddr(Value *Addr, unsigned Depth) {
  if (ConstantInt *CI = dyn_cast<ConstantInt>(Addr)) {
    // Fold in immediates if legal for the target.
    AddrMode.BaseOffs += CI->getSExtValue();
    if (TLI.isLegalAddressingMode(AddrMode, AccessTy))
      return true;
    AddrMode.BaseOffs -= CI->getSExtValue();
  } else if (GlobalValue *GV = dyn_cast<GlobalValue>(Addr)) {
    // If this is a global variable, try to fold it into the addressing mode.
    if (AddrMode.BaseGV == 0) {
      AddrMode.BaseGV = GV;
      if (TLI.isLegalAddressingMode(AddrMode, AccessTy))
        return true;
      AddrMode.BaseGV = 0;
    }
  } else if (Instruction *I = dyn_cast<Instruction>(Addr)) {
    ExtAddrMode BackupAddrMode = AddrMode;
    unsigned OldSize = AddrModeInsts.size();

    // Check to see if it is possible to fold this operation.
    if (MatchOperationAddr(I, I->getOpcode(), Depth)) {
      // Okay, it's possible to fold this.  Check to see if it is actually
      // *profitable* to do so.  We use a simple cost model to avoid increasing
      // register pressure too much.
      if (I->hasOneUse() ||
          IsProfitableToFoldIntoAddressingMode(I, BackupAddrMode, AddrMode)) {
        AddrModeInsts.push_back(I);
        return true;
      }

      // It isn't profitable to do this, roll back.
      AddrMode = BackupAddrMode;
      AddrModeInsts.resize(OldSize);
    }
  } else if (ConstantExpr *CE = dyn_cast<ConstantExpr>(Addr)) {
    if (MatchOperationAddr(CE, CE->getOpcode(), Depth))
      return true;
  } else if (isa<ConstantPointerNull>(Addr)) {
    // Null pointer gets folded without affecting the addressing mode.
    return true;
  }

  // Worst case, the target should support [reg] addressing modes. :)
  if (!AddrMode.HasBaseReg) {
    AddrMode.HasBaseReg = true;
    AddrMode.BaseReg = Addr;
    // Still check for legality in case the target supports [imm] but not
    // [i+r].
    if (TLI.isLegalAddressingMode(AddrMode, AccessTy))
      return true;
    AddrMode.HasBaseReg = false;
    AddrMode.BaseReg = 0;
  }

  // If the base register is already taken, see if we can do [r+r].
  if (AddrMode.Scale == 0) {
    AddrMode.Scale = 1;
    AddrMode.ScaledReg = Addr;
    if (TLI.isLegalAddressingMode(AddrMode, AccessTy))
      return true;
    AddrMode.Scale = 0;
    AddrMode.ScaledReg = 0;
  }
  // Couldn't match.
  return false;
}
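// The fallbacks mirror the simplest hardware modes: first try Addr as the
// base register ([reg]); if a base is already taken, try it as an unscaled
// index ([reg + 1*reg]) before giving up.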
/// IsOperandAMemoryOperand - Check to see if all uses of OpVal by the
/// specified inline asm call are due to memory operands.  If so, return true,
/// otherwise return false.
static bool IsOperandAMemoryOperand(CallInst *CI, InlineAsm *IA, Value *OpVal,
                                    const TargetLowering &TLI) {
  TargetLowering::AsmOperandInfoVector TargetConstraints =
    TLI.ParseConstraints(ImmutableCallSite(CI));
  for (unsigned i = 0, e = TargetConstraints.size(); i != e; ++i) {
    TargetLowering::AsmOperandInfo &OpInfo = TargetConstraints[i];

    // Compute the constraint code and ConstraintType to use.
    TLI.ComputeConstraintToUse(OpInfo, SDValue());

    // If this asm operand is our Value*, and if it isn't an indirect memory
    // operand, we can't fold it!
    if (OpInfo.CallOperandVal == OpVal &&
        (OpInfo.ConstraintType != TargetLowering::C_Memory ||
         !OpInfo.isIndirect))
      return false;
  }
  return true;
}

/// FindAllMemoryUses - Recursively walk all the uses of I until we find a
/// memory use.  If we find an obviously non-foldable instruction, return
/// true.  Add the ultimately found memory instructions to MemoryUses.
static bool FindAllMemoryUses(Instruction *I,
                SmallVectorImpl<std::pair<Instruction*,unsigned> > &MemoryUses,
                              SmallPtrSet<Instruction*, 16> &ConsideredInsts,
                              const TargetLowering &TLI) {
  // If we already considered this instruction, we're done.
  if (!ConsideredInsts.insert(I))
    return false;

  // If all the uses of this instruction are potentially foldable, we're fine.
  if (!MightBeFoldableInst(I))
    return true;

  // Loop over all the uses, recursively processing them.
  for (Value::use_iterator UI = I->use_begin(), E = I->use_end();
       UI != E; ++UI) {
    User *U = *UI;

    if (LoadInst *LI = dyn_cast<LoadInst>(U)) {
      MemoryUses.push_back(std::make_pair(LI, UI.getOperandNo()));
      continue;
    }

    if (StoreInst *SI = dyn_cast<StoreInst>(U)) {
      unsigned opNo = UI.getOperandNo();
      if (opNo == 0) return true;  // Storing addr, not into addr.
      MemoryUses.push_back(std::make_pair(SI, opNo));
      continue;
    }

    if (CallInst *CI = dyn_cast<CallInst>(U)) {
      InlineAsm *IA = dyn_cast<InlineAsm>(CI->getCalledValue());
      if (!IA) return true;

      // If this is a memory operand, we're cool, otherwise bail out.
      if (!IsOperandAMemoryOperand(CI, IA, I, TLI))
        return true;
      continue;
    }

    if (FindAllMemoryUses(cast<Instruction>(U), MemoryUses, ConsideredInsts,
                          TLI))
      return true;
  }

  return false;
}
/// ValueAlreadyLiveAtInst - Return true if Val is already known to be live at
/// the use site we're folding it into.  If so, there is no cost to include it
/// in the addressing mode.  KnownLive1 and KnownLive2 are two values that we
/// know are live at the instruction already.
bool AddressingModeMatcher::ValueAlreadyLiveAtInst(Value *Val,
                                                   Value *KnownLive1,
                                                   Value *KnownLive2) {
  // If Val is either of the known-live values, we know it is live!
  if (Val == 0 || Val == KnownLive1 || Val == KnownLive2)
    return true;

  // All values other than instructions and arguments (e.g. constants) are
  // live.
  if (!isa<Instruction>(Val) && !isa<Argument>(Val)) return true;

  // If Val is a constant sized alloca in the entry block, it is live; it is
  // just a reference to the stack/frame pointer, which is live for the whole
  // function.
  if (AllocaInst *AI = dyn_cast<AllocaInst>(Val))
    if (AI->isStaticAlloca())
      return true;

  // Check to see if this value is already used in the memory instruction's
  // block.  If so, it's already live into the block at the very least, so we
  // can reasonably fold it.
  return Val->isUsedInBasicBlock(MemoryInst->getParent());
}
/// IsProfitableToFoldIntoAddressingMode - The machine's addressing mode may
/// be able to fold the specified instruction into a load or store that
/// ultimately uses it, but the instruction has multiple uses; folding it may
/// actually increase register pressure.  Return true if it is profitable.
bool AddressingModeMatcher::
IsProfitableToFoldIntoAddressingMode(Instruction *I, ExtAddrMode &AMBefore,
                                     ExtAddrMode &AMAfter) {
  if (IgnoreProfitability) return true;

  // ...
  Value *BaseReg = AMAfter.BaseReg, *ScaledReg = AMAfter.ScaledReg;

  // If the BaseReg or ScaledReg was referenced by the previous addrmode,
  // their lifetime wasn't extended by adding this instruction.
  if (ValueAlreadyLiveAtInst(BaseReg, AMBefore.BaseReg, AMBefore.ScaledReg))
    BaseReg = 0;
  if (ValueAlreadyLiveAtInst(ScaledReg, AMBefore.BaseReg, AMBefore.ScaledReg))
    ScaledReg = 0;

  // If folding this instruction (and its subexprs) didn't extend any live
  // ranges, we're ok with it.
  if (BaseReg == 0 && ScaledReg == 0)
    return true;

  // Find all the memory uses of the folded computation; bail if it has a
  // non-memory, non-foldable use.
  SmallVector<std::pair<Instruction*,unsigned>, 16> MemoryUses;
  SmallPtrSet<Instruction*, 16> ConsideredInsts;
  if (FindAllMemoryUses(I, MemoryUses, ConsideredInsts, TLI))
    return false;

  // Walk each memory use and verify the address can be re-folded there.
  SmallVector<Instruction*, 32> MatchedAddrModeInsts;
  for (unsigned i = 0, e = MemoryUses.size(); i != e; ++i) {
    Instruction *User = MemoryUses[i].first;
    unsigned OpNo = MemoryUses[i].second;

    // Get the access type of this use.  If the use isn't a pointer, we don't
    // know what it accesses.
    Value *Address = User->getOperand(OpNo);
    if (!Address->getType()->isPointerTy())
      return false;
    Type *AddressAccessTy =
      cast<PointerType>(Address->getType())->getElementType();

    // Do a match against the root of this address, ignoring profitability.
    // This will tell us if the addressing mode for the memory operation will
    // *actually* cover the shared instruction.
    ExtAddrMode Result;
    AddressingModeMatcher Matcher(MatchedAddrModeInsts, TLI, AddressAccessTy,
                                  MemoryInst, Result);
    Matcher.IgnoreProfitability = true;
    bool Success = Matcher.MatchAddr(Address, 0);
    (void)Success; assert(Success && "Couldn't select *anything*?");

    // If the match didn't cover I, then it won't be shared by it.
    if (std::find(MatchedAddrModeInsts.begin(), MatchedAddrModeInsts.end(),
                  I) == MatchedAddrModeInsts.end())
      return false;

    MatchedAddrModeInsts.clear();
  }

  return true;
}
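// The cost model is deliberately conservative: folding is accepted when it
// extends no live ranges, or when every transitive user of the folded
// computation is itself a memory instruction whose own addressing-mode match
// covers the same instruction, so no value stays live solely for the fold.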
/// OptimizeMemoryInst - Load and store instructions often have addressing
/// modes that can do significant amounts of computation, but isel can only
/// see within a single block.  This sinks address computations used by memory
/// instructions into the block where they are used.
bool CodeGenPrepare::OptimizeMemoryInst(Instruction *MemoryInst, Value *Addr,
                                        Type *AccessTy) {
  Value *Repl = Addr;

  // Try to collapse single-value PHI nodes.  This is necessary to undo
  // unprofitable PRE transformations.
  SmallVector<Value*, 8> worklist;
  SmallPtrSet<Value*, 16> Visited;
  worklist.push_back(Addr);

  // Use a worklist to iteratively look through PHI nodes, and ensure that the
  // addressing modes obtained from the non-PHI roots of the graph are
  // equivalent.
  Value *Consensus = 0;
  unsigned NumUsesConsensus = 0;
  bool IsNumUsesConsensusValid = false;
  SmallVector<Instruction*, 16> AddrModeInsts;
  ExtAddrMode AddrMode;
  while (!worklist.empty()) {
    Value *V = worklist.back();
    worklist.pop_back();

    // Break use-def graph loops.
    if (!Visited.insert(V)) {
      Consensus = 0;
      break;
    }

    // For a PHI node, push all of its incoming values.
    if (PHINode *P = dyn_cast<PHINode>(V)) {
      for (unsigned i = 0, e = P->getNumIncomingValues(); i != e; ++i)
        worklist.push_back(P->getIncomingValue(i));
      continue;
    }

    // For non-PHIs, determine the addressing mode being computed.
    SmallVector<Instruction*, 16> NewAddrModeInsts;
    ExtAddrMode NewAddrMode =
      AddressingModeMatcher::Match(V, AccessTy, MemoryInst,
                                   NewAddrModeInsts, *TLI);

    // This check is broken into two cases with very similar code to avoid
    // using getNumUses() as much as possible.  Some values have a lot of
    // uses, so calling getNumUses() unconditionally caused a significant
    // compile-time regression.
    if (!Consensus) {
      Consensus = V;
      AddrMode = NewAddrMode;
      AddrModeInsts = NewAddrModeInsts;
      continue;
    } else if (NewAddrMode == AddrMode) {
      if (!IsNumUsesConsensusValid) {
        NumUsesConsensus = Consensus->getNumUses();
        IsNumUsesConsensusValid = true;
      }

      // Prefer the root with the most uses as the representative, to keep
      // the cost of the addressing mode adjustment low.
      unsigned NumUses = V->getNumUses();
      if (NumUses > NumUsesConsensus) {
        Consensus = V;
        NumUsesConsensus = NumUses;
        AddrModeInsts = NewAddrModeInsts;
      }
      continue;
    }

    Consensus = 0;
    break;
  }

  // If the addressing mode couldn't be determined, or if multiple different
  // ones were determined, bail out now.
  if (!Consensus) return false;

  // Check to see if any of the instructions subsumed by this addr mode are
  // non-local to I's BB.
  bool AnyNonLocal = false;
  for (unsigned i = 0, e = AddrModeInsts.size(); i != e; ++i) {
    if (IsNonLocalValue(AddrModeInsts[i], MemoryInst->getParent())) {
      AnyNonLocal = true;
      break;
    }
  }

  // If all the instructions matched are already in this BB, don't do
  // anything.
  if (!AnyNonLocal) {
    DEBUG(dbgs() << "CGP: Found      local addrmode: " << AddrMode << "\n");
    return false;
  }

  // Insert this computation right after this user.  Since our caller is
  // scanning from the top of the BB to the bottom, reuses of the expr are
  // guaranteed to happen later.
  IRBuilder<> Builder(MemoryInst);

  // Check to see if we have already sunk this address for some other
  // load/store in this block.  If so, reuse the computation.
  Value *&SunkAddr = SunkAddrs[Addr];
  if (SunkAddr) {
    DEBUG(dbgs() << "CGP: Reusing nonlocal addrmode: " << AddrMode << " for "
                 << *MemoryInst);
    if (SunkAddr->getType() != Addr->getType())
      SunkAddr = Builder.CreateBitCast(SunkAddr, Addr->getType());
  } else {
    DEBUG(dbgs() << "CGP: SINKING nonlocal addrmode: " << AddrMode << " for "
                 << *MemoryInst);
    Type *IntPtrTy =
          TLI->getDataLayout()->getIntPtrType(AccessTy->getContext());

    Value *Result = 0;

    // Start with the base register.  Do this first so that subsequent address
    // matching finds it last, which will prevent it from trying to match it
    // as the scaled value in case it happens to be a mul.
    if (AddrMode.BaseReg) {
      Value *V = AddrMode.BaseReg;
      if (V->getType()->isPointerTy())
        V = Builder.CreatePtrToInt(V, IntPtrTy, "sunkaddr");
      if (V->getType() != IntPtrTy)
        V = Builder.CreateIntCast(V, IntPtrTy, /*isSigned=*/true, "sunkaddr");
      Result = V;
    }

    // Add the scale value.
    if (AddrMode.Scale) {
      Value *V = AddrMode.ScaledReg;
      if (V->getType() == IntPtrTy) {
        // done.
      } else if (V->getType()->isPointerTy()) {
        V = Builder.CreatePtrToInt(V, IntPtrTy, "sunkaddr");
      } else if (cast<IntegerType>(IntPtrTy)->getBitWidth() <
                 cast<IntegerType>(V->getType())->getBitWidth()) {
        V = Builder.CreateTrunc(V, IntPtrTy, "sunkaddr");
      } else {
        V = Builder.CreateSExt(V, IntPtrTy, "sunkaddr");
      }
      if (AddrMode.Scale != 1)
        V = Builder.CreateMul(V, ConstantInt::get(IntPtrTy, AddrMode.Scale),
                              "sunkaddr");
      if (Result)
        Result = Builder.CreateAdd(Result, V, "sunkaddr");
      else
        Result = V;
    }

    // Add in the BaseGV if present.
    if (AddrMode.BaseGV) {
      Value *V = Builder.CreatePtrToInt(AddrMode.BaseGV, IntPtrTy, "sunkaddr");
      if (Result)
        Result = Builder.CreateAdd(Result, V, "sunkaddr");
      else
        Result = V;
    }

    // Add in the Base Offset if present.
    if (AddrMode.BaseOffs) {
      Value *V = ConstantInt::get(IntPtrTy, AddrMode.BaseOffs);
      if (Result)
        Result = Builder.CreateAdd(Result, V, "sunkaddr");
      else
        Result = V;
    }

    if (Result == 0)
      SunkAddr = Constant::getNullValue(Addr->getType());
    else
      SunkAddr = Builder.CreateIntToPtr(Result, Addr->getType(), "sunkaddr");
  }

  MemoryInst->replaceUsesOfWith(Repl, SunkAddr);

  // If we have no uses, recursively delete the value and all dead
  // instructions using it.
  if (Repl->use_empty()) {
    // This can cause recursive deletion, which can invalidate our iterator.
    // Use a WeakVH to hold onto it in case this happens.
    WeakVH IterHandle(CurInstIterator);
    BasicBlock *BB = CurInstIterator->getParent();

    RecursivelyDeleteTriviallyDeadInstructions(Repl, TLInfo);

    if (IterHandle != CurInstIterator) {
      // The iterator instruction was recursively deleted; start over at the
      // start of the block.
      CurInstIterator = BB->begin();
      SunkAddrs.clear();
    } else {
      // This address is now available for reassignment, so erase the table
      // entry; we don't want to match some completely different instruction.
      SunkAddrs[Addr] = 0;
    }
  }
  ++NumMemoryInsts;
  return true;
}
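// The sunk computation reappears next to the memory instruction as a chain of
// "sunkaddr" values, e.g. for [%base + 4*%idx + 16] (illustrative IR):
//   %sunkaddr  = ptrtoint i8* %base to i64
//   %sunkaddr1 = mul i64 %idx, 4
//   %sunkaddr2 = add i64 %sunkaddr, %sunkaddr1
//   %sunkaddr3 = add i64 %sunkaddr2, 16
//   %sunkaddr4 = inttoptr i64 %sunkaddr3 to i32*
// which instruction selection can then fold back into the load or store.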
/// OptimizeInlineAsmInst - Sink addressing computations for the memory
/// operands of an inline asm call into its block.
bool CodeGenPrepare::OptimizeInlineAsmInst(CallInst *CS) {
  bool MadeChange = false;

  TargetLowering::AsmOperandInfoVector
    TargetConstraints = TLI->ParseConstraints(CS);
  unsigned ArgNo = 0;
  for (unsigned i = 0, e = TargetConstraints.size(); i != e; ++i) {
    TargetLowering::AsmOperandInfo &OpInfo = TargetConstraints[i];

    // Compute the constraint code and ConstraintType to use.
    TLI->ComputeConstraintToUse(OpInfo, SDValue());

    if (OpInfo.ConstraintType == TargetLowering::C_Memory &&
        OpInfo.isIndirect) {
      Value *OpVal = CS->getArgOperand(ArgNo++);
      MadeChange |= OptimizeMemoryInst(CS, OpVal, OpVal->getType());
    } else if (OpInfo.Type == InlineAsm::isInput)
      ArgNo++;
  }

  return MadeChange;
}
/// MoveExtToFormExtLoad - Move a zext or sext fed by a load into the same
/// basic block as the load, unless conditions are unfavorable.  This allows
/// SelectionDAG to fold the extend into the load.
bool CodeGenPrepare::MoveExtToFormExtLoad(Instruction *I) {
  // Look for a load being extended.
  LoadInst *LI = dyn_cast<LoadInst>(I->getOperand(0));
  if (!LI) return false;

  // If they're already in the same block, there's nothing to do.
  if (LI->getParent() == I->getParent())
    return false;

  // Check whether the target supports casts folded into loads.
  unsigned LType;
  if (isa<ZExtInst>(I))
    LType = ISD::ZEXTLOAD;
  else {
    assert(isa<SExtInst>(I) && "Unexpected ext type!");
    LType = ISD::SEXTLOAD;
  }
  if (TLI && !TLI->isLoadExtLegal(LType, TLI->getValueType(LI->getType())))
    return false;

  // Move the extend into the same block as the load, so that SelectionDAG
  // can fold it.
  I->removeFromParent();
  I->insertAfter(LI);
  ++NumExtsMoved;
  return true;
}
bool CodeGenPrepare::OptimizeExtUses(Instruction *I) {
  BasicBlock *DefBB = I->getParent();

  // If the result of a {s|z}ext and its source are both live out, rewrite all
  // other uses of the source with the result of the extension.
  Value *Src = I->getOperand(0);
  // ...

  // Only safe to perform the optimization if the source is also defined in
  // this block.
  if (!isa<Instruction>(Src) || DefBB != cast<Instruction>(Src)->getParent())
    return false;

  bool DefIsLiveOut = false;
  for (Value::use_iterator UI = I->use_begin(), E = I->use_end();
       UI != E; ++UI) {
    Instruction *User = cast<Instruction>(*UI);

    // Figure out which BB this ext is used in.
    BasicBlock *UserBB = User->getParent();
    if (UserBB == DefBB) continue;
    DefIsLiveOut = true;
    break;
  }
  if (!DefIsLiveOut)
    return false;

  // Make sure none of the uses are PHI nodes.
  for (Value::use_iterator UI = Src->use_begin(), E = Src->use_end();
       UI != E; ++UI) {
    Instruction *User = cast<Instruction>(*UI);
    BasicBlock *UserBB = User->getParent();
    if (UserBB == DefBB) continue;
    // Be conservative.  We don't want this xform to end up introducing
    // reloads just before load / store instructions.
    if (isa<PHINode>(User) || isa<LoadInst>(User) || isa<StoreInst>(User))
      return false;
  }

  // InsertedTruncs - Only insert one trunc in each block once.
  DenseMap<BasicBlock*, Instruction*> InsertedTruncs;

  bool MadeChange = false;
  for (Value::use_iterator UI = Src->use_begin(), E = Src->use_end();
       UI != E; ++UI) {
    Use &TheUse = UI.getUse();
    Instruction *User = cast<Instruction>(*UI);

    // Figure out which BB this ext is used in.
    BasicBlock *UserBB = User->getParent();
    if (UserBB == DefBB) continue;

    // Both src and def are live in this block.  Rewrite the use.
    Instruction *&InsertedTrunc = InsertedTruncs[UserBB];

    if (!InsertedTrunc) {
      BasicBlock::iterator InsertPt = UserBB->getFirstInsertionPt();
      InsertedTrunc = new TruncInst(I, Src->getType(), "", InsertPt);
    }

    // Replace a use of the {s|z}ext source with a use of the result.
    TheUse = InsertedTrunc;
    ++NumExtUses;
    MadeChange = true;
  }

  return MadeChange;
}
/// isFormingBranchFromSelectProfitable - Returns true if a SelectInst should
/// be turned into an explicit branch.
static bool isFormingBranchFromSelectProfitable(SelectInst *SI) {
  CmpInst *Cmp = dyn_cast<CmpInst>(SI->getCondition());
  if (!Cmp)
    return false;

  Value *CmpOp0 = Cmp->getOperand(0);
  Value *CmpOp1 = Cmp->getOperand(1);

  // Emit compares with a memory operand as branches to avoid stalls on the
  // load.  We check that the memory operand has one use to avoid uses of the
  // loaded value directly after the compare, which make branches
  // unprofitable.
  return Cmp->hasOneUse() &&
         ((isa<LoadInst>(CmpOp0) && CmpOp0->hasOneUse()) ||
          (isa<LoadInst>(CmpOp1) && CmpOp1->hasOneUse()));
}

bool CodeGenPrepare::OptimizeSelectInst(SelectInst *SI) {
  // Can we convert the 'select' to control flow, and is it profitable?
  if (DisableSelectToBranch || OptSize || !TLI)
    return false;
  // ...
  CurInstIterator = StartBlock->end();
  ++NumSelectsExpanded;
  return true;
}
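// Expanding the select replaces a data dependence on a possibly
// cache-missing load with a predictable branch:
//   %v = select i1 %c, i32 %a, i32 %b
// becomes a conditional branch whose two arms feed a PHI in the merge block.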
bool CodeGenPrepare::OptimizeInst(Instruction *I) {
  if (PHINode *P = dyn_cast<PHINode>(I)) {
    // It is possible for very late stage optimizations (such as SimplifyCFG)
    // to introduce PHI nodes too late to be cleaned up.  If we detect such a
    // trivial PHI, go ahead and zap it here.
    if (Value *V = SimplifyInstruction(P)) {
      P->replaceAllUsesWith(V);
      P->eraseFromParent();
      ++NumPHIsElim;
      return true;
    }
    return false;
  }

  if (CastInst *CI = dyn_cast<CastInst>(I)) {
    // If the source of the cast is a constant, it should already have been
    // constant folded; something (e.g. LSR) may have deliberately placed the
    // evaluation in another block, so don't forward-substitute it.
    if (isa<Constant>(CI->getOperand(0)))
      return false;

    if (TLI && OptimizeNoopCopyExpression(CI, *TLI))
      return true;

    if (isa<ZExtInst>(I) || isa<SExtInst>(I)) {
      bool MadeChange = MoveExtToFormExtLoad(I);
      return MadeChange | OptimizeExtUses(I);
    }
    return false;
  }

  if (CmpInst *CI = dyn_cast<CmpInst>(I))
    return OptimizeCmpExpression(CI);

  if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
    if (TLI)
      return OptimizeMemoryInst(I, I->getOperand(0), LI->getType());
    return false;
  }

  if (StoreInst *SI = dyn_cast<StoreInst>(I)) {
    if (TLI)
      return OptimizeMemoryInst(I, SI->getOperand(1),
                                SI->getOperand(0)->getType());
    return false;
  }

  if (GetElementPtrInst *GEPI = dyn_cast<GetElementPtrInst>(I)) {
    if (GEPI->hasAllZeroIndices()) {
      /// The GEP operand must be a pointer, so must its result -> BitCast
      Instruction *NC = new BitCastInst(GEPI->getOperand(0), GEPI->getType(),
                                        GEPI->getName(), GEPI);
      GEPI->replaceAllUsesWith(NC);
      GEPI->eraseFromParent();
      ++NumGEPsElim;
      OptimizeInst(NC);
      return true;
    }
    return false;
  }

  if (CallInst *CI = dyn_cast<CallInst>(I))
    return OptimizeCallInst(CI);

  if (SelectInst *SI = dyn_cast<SelectInst>(I))
    return OptimizeSelectInst(SI);

  return false;
}
// In this pass we look for GEP and cast instructions that are used across
// basic blocks and rewrite them to improve basic-block-at-a-time selection.
bool CodeGenPrepare::OptimizeBlock(BasicBlock &BB) {
  SunkAddrs.clear();
  bool MadeChange = false;

  CurInstIterator = BB.begin();
  while (CurInstIterator != BB.end())
    MadeChange |= OptimizeInst(CurInstIterator++);

  MadeChange |= DupRetToEnableTailCallOpts(&BB);

  return MadeChange;
}
// If an llvm.dbg.value is far away from the value it describes, isel may not
// be able to handle it properly and will drop it if it cannot find a node
// corresponding to the value.  Move each llvm.dbg.value to be immediately
// after the real definition of the value.
bool CodeGenPrepare::PlaceDbgValues(Function &F) {
  bool MadeChange = false;
  for (Function::iterator I = F.begin(), E = F.end(); I != E; ++I) {
    Instruction *PrevNonDbgInst = NULL;
    for (BasicBlock::iterator BI = I->begin(), BE = I->end(); BI != BE;) {
      Instruction *Insn = BI; ++BI;
      DbgValueInst *DVI = dyn_cast<DbgValueInst>(Insn);
      if (!DVI) {
        PrevNonDbgInst = Insn;
        continue;
      }

      Instruction *VI = dyn_cast_or_null<Instruction>(DVI->getValue());
      if (VI && VI != PrevNonDbgInst && !VI->isTerminator()) {
        DEBUG(dbgs() << "Moving Debug Value before :\n" << *DVI << ' ' << *VI);
        DVI->removeFromParent();
        if (isa<PHINode>(VI))
          DVI->insertBefore(VI->getParent()->getFirstInsertionPt());
        else
          DVI->insertAfter(VI);
        MadeChange = true;
        ++NumDbgValueMoved;
      }
    }
  }
  return MadeChange;
}
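// Keeping each llvm.dbg.value adjacent to its defining instruction means
// instruction selection can still find the node the intrinsic refers to, so
// variable locations survive into the selected code instead of being dropped.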