#define DEBUG_TYPE "peephole-opt"

static cl::opt<bool>
Aggressive("aggressive-ext-opt", cl::Hidden,
           cl::desc("Aggressive extension optimization"));

static cl::opt<bool>
DisablePeephole("disable-peephole", cl::Hidden, cl::init(false),
                cl::desc("Disable the peephole optimizer"));

STATISTIC(NumReuse,          "Number of extension results reused");
STATISTIC(NumCmps,           "Number of compares eliminated");
STATISTIC(NumImmFold,        "Number of move immediate folded");
STATISTIC(NumLoadFold,       "Number of loads folded");
STATISTIC(NumSelects,        "Number of selects optimized");
STATISTIC(NumCopiesBitcasts, "Number of copies/bitcasts optimized");
    bool isLoadFoldable(MachineInstr *MI, unsigned &FoldAsLoadDefReg);

// ...

INITIALIZE_PASS_BEGIN(PeepholeOptimizer, "peephole-opts",
                      "Peephole Optimizations", false, false)
INITIALIZE_PASS_DEPENDENCY(MachineDominatorTree)
INITIALIZE_PASS_END(PeepholeOptimizer, "peephole-opts",
                    "Peephole Optimizations", false, false)
bool PeepholeOptimizer::
optimizeExtInstr(MachineInstr *MI, MachineBasicBlock *MBB,
                 SmallPtrSet<MachineInstr*, 8> &LocalMIs) {
  unsigned SrcReg, DstReg, SubIdx;
  if (!TII->isCoalescableExtInstr(*MI, SrcReg, DstReg, SubIdx))
    return false;
  // ...
  if (MRI->hasOneNonDBGUse(SrcReg))
    // No other uses.
    return false;
  // ...
  DstRC = TM->getRegisterInfo()->getSubClassWithSubReg(DstRC, SubIdx);
  // ...
  bool UseSrcSubIdx = TM->getRegisterInfo()->
    getSubClassWithSubReg(MRI->getRegClass(SrcReg), SubIdx) != 0;

  // Basic blocks reached by the extension result.
  SmallPtrSet<MachineBasicBlock*, 4> ReachedBBs;
  for (MachineRegisterInfo::use_nodbg_iterator
         UI = MRI->use_nodbg_begin(DstReg), UE = MRI->use_nodbg_end();
       UI != UE; ++UI)
    ReachedBBs.insert(UI->getParent());

  SmallVector<MachineOperand*, 8> Uses;
  SmallVector<MachineOperand*, 8> ExtendedUses;

  bool ExtendLife = true;
  for (MachineRegisterInfo::use_nodbg_iterator
         UI = MRI->use_nodbg_begin(SrcReg), UE = MRI->use_nodbg_end();
       UI != UE; ++UI) {
    MachineOperand &UseMO = UI.getOperand();
    MachineInstr *UseMI = &*UI;
    if (UseMI == MI)
      continue;

    if (UseMI->isPHI()) {
      ExtendLife = false;
      continue;
    }

    // Only accept uses of SrcReg:SubIdx.
    if (UseSrcSubIdx && UseMO.getSubReg() != SubIdx)
      continue;
    // ...
      if (!LocalMIs.count(UseMI))
        Uses.push_back(&UseMO);
    } else if (ReachedBBs.count(UseMBB)) {
      Uses.push_back(&UseMO);
    } else if (Aggressive && DT->dominates(MBB, UseMBB)) {
      ExtendedUses.push_back(&UseMO);
    } else {
      ExtendLife = false;
      break;
    }
  }

  if (ExtendLife && !ExtendedUses.empty())
    std::copy(ExtendedUses.begin(), ExtendedUses.end(),
              std::back_inserter(Uses));

  // Now replace all uses.
  bool Changed = false;
  if (!Uses.empty()) {
    // ...
    for (MachineRegisterInfo::use_nodbg_iterator
           UI = MRI->use_nodbg_begin(DstReg), UE = MRI->use_nodbg_end();
         UI != UE; ++UI)
      if (UI->isPHI())
        PHIBBs.insert(UI->getParent());

    const TargetRegisterClass *RC = MRI->getRegClass(SrcReg);
    for (unsigned i = 0, e = Uses.size(); i != e; ++i) {
      MachineOperand *UseMO = Uses[i];
      MachineInstr *UseMI = UseMO->getParent();
      MachineBasicBlock *UseMBB = UseMI->getParent();
      if (PHIBBs.count(UseMBB))
        continue;

      // About to add uses of DstReg, clear DstReg's kill flags.
      if (!Changed) {
        MRI->clearKillFlags(DstReg);
        MRI->constrainRegClass(DstReg, DstRC);
      }

      unsigned NewVR = MRI->createVirtualRegister(RC);
      MachineInstr *Copy = BuildMI(*UseMBB, UseMI, UseMI->getDebugLoc(),
                                   TII->get(TargetOpcode::COPY), NewVR)
        .addReg(DstReg, 0, SubIdx);
      // ...
      UseMO->setReg(NewVR);
      // ...
      ++NumReuse;
      Changed = true;
    }
  }

  return Changed;
}
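// Illustrative sketch of the rewrite above (hypothetical x86-style MIR; the
// actual opcodes and sub-register index are target-specific):
//
//   %dst = MOVSX64rr32 %src      ; coalescable extension of %src
//   ...  = ADD32rr %src, ...     ; another use of the narrow value
//
// becomes, via the COPY built with .addReg(DstReg, 0, SubIdx):
//
//   %dst  = MOVSX64rr32 %src
//   %newv = COPY %dst:sub_32bit
//   ...   = ADD32rr %newv, ...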
bool PeepholeOptimizer::optimizeCmpInstr(MachineInstr *MI,
                                         MachineBasicBlock *MBB) {
  // ...
  unsigned SrcReg, SrcReg2;
  int CmpMask, CmpValue;
  // ...
  if (TII->optimizeCompareInstr(MI, SrcReg, SrcReg2, CmpMask, CmpValue, MRI)) {
    ++NumCmps;
    return true;
  }
  // ...
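// Illustrative sketch (hypothetical ARM-style MIR): on targets whose
// arithmetic instructions have flag-setting forms, optimizeCompareInstr can
// turn
//
//   %d = SUBrr %a, %b
//   CMPrr %a, %b          ; compares the same operands, only sets flags
//
// into
//
//   %d = SUBSrr %a, %b    ; flag-setting subtract; the CMP is erased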
bool PeepholeOptimizer::optimizeSelect(MachineInstr *MI) {
  unsigned TrueOp = 0;
  unsigned FalseOp = 0;
  bool Optimizable = false;
  SmallVector<MachineOperand, 4> Cond;
  if (TII->analyzeSelect(MI, Cond, TrueOp, FalseOp, Optimizable))
    return false;
  if (!Optimizable)
    return false;
  if (!TII->optimizeSelect(MI))
    return false;
  MI->eraseFromParent();
  ++NumSelects;
  return true;
}
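// Illustrative sketch of what a target's optimizeSelect() hook might do
// (hypothetical opcodes):
//
//   %x = SELECT %cond, %a, %b    ; pseudo otherwise expanded into branches
//
// can be rewritten as
//
//   %x = CMOVrr %a, %b, %cond    ; single conditional move, no branch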
static bool shareSameRegisterFile(const TargetRegisterInfo &TRI,
                                  const TargetRegisterClass *DefRC, unsigned DefSubReg,
                                  const TargetRegisterClass *SrcRC, unsigned SrcSubReg) {
  // ...
  unsigned SrcIdx, DefIdx;
  if (SrcSubReg && DefSubReg)
    return TRI.getCommonSuperRegClass(SrcRC, SrcSubReg, DefRC, DefSubReg,
                                      SrcIdx, DefIdx) != NULL;
  // ...
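// Example of what "same register file" means here (assumed x86-style register
// classes and sub-register names):
//
//   %a:gr32 = COPY %b:sub_32bit   ; %b:gr64 -> same file, rewrite is profitable
//   %a:gr32 = COPY %f             ; %f:fr32 -> different bank, keep searching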
static bool getCopyOrBitcastDefUseIdx(const MachineInstr &Copy,
                                      unsigned &DefIdx, unsigned &SrcIdx) {
  assert((Copy.isCopy() || Copy.isBitcast()) && "Wrong operation type.");
  // ...
  SrcIdx = Copy.getDesc().getNumOperands();
  for (unsigned OpIdx = 0, EndOpIdx = SrcIdx; OpIdx != EndOpIdx; ++OpIdx) {
    const MachineOperand &MO = Copy.getOperand(OpIdx);
    if (!MO.isReg() || !MO.getReg())
      continue;
    if (MO.isDef())
      DefIdx = OpIdx;
    else if (SrcIdx != EndOpIdx)
      // Multiple sources?
      return false;
    SrcIdx = OpIdx;
  }
  return true;
}
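// For a plain COPY the answer is fixed: DefIdx = 0, SrcIdx = 1. The scan above
// is only needed for bitcast-like instructions, which may carry extra implicit
// operands; it records the single def and the single register source, and
// bails out if more than one source is found.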
bool PeepholeOptimizer::optimizeCopyOrBitcast(MachineInstr *MI) {
  unsigned DefIdx, SrcIdx;
  if (!MI || !getCopyOrBitcastDefUseIdx(*MI, DefIdx, SrcIdx))
    return false;

  const MachineOperand &MODef = MI->getOperand(DefIdx);
  assert(MODef.isReg() && "Copies must be between registers.");
  unsigned Def = MODef.getReg();
  // ...
  bool ShouldRewrite = false;
  // ...
  const MachineInstr *Copy = MI;
  do {
    unsigned CopyDefIdx, CopySrcIdx;
    if (!getCopyOrBitcastDefUseIdx(*Copy, CopyDefIdx, CopySrcIdx))
      break;

    const MachineOperand &MO = Copy->getOperand(CopySrcIdx);
    assert(MO.isReg() && "Copies must be between registers.");
    Src = MO.getReg();
    // ...
    // If this source does not incur a cross register bank copy, use it.
    ShouldRewrite = shareSameRegisterFile(TRI, DefRC, DefSubReg, SrcRC,
                                          SrcSubReg);
    // Follow the chain of copies: get the definition of Src.
    Copy = MRI->getVRegDef(Src);
  } while (!ShouldRewrite && Copy && (Copy->isCopy() || Copy->isBitcast()));
  // ...
  // Rewrite the copy to avoid a cross register bank penalty.
  unsigned NewVR = TargetRegisterInfo::isPhysicalRegister(Def) ? Def :
    MRI->createVirtualRegister(DefRC);
  MachineInstr *NewCopy = BuildMI(*MI->getParent(), MI, MI->getDebugLoc(),
                                  TII->get(TargetOpcode::COPY), NewVR)
    .addReg(Src, 0, SrcSubReg);
  // ...
  MRI->replaceRegWith(Def, NewVR);
  MRI->clearKillFlags(NewVR);
  // ...
  ++NumCopiesBitcasts;
  return true;
}
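// Illustrative sketch (hypothetical register banks): the do/while walk follows
// a chain such as
//
//   %c:fpr = ...
//   %b:gpr = COPY %c      ; cross-bank copy
//   %a:fpr = COPY %b      ; MI being optimized
//
// and, once shareSameRegisterFile() accepts a source, rewrites MI as
//
//   %newvr:fpr = COPY %c
//
// replacing %a with %newvr and avoiding the round trip through the other bank.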
bool PeepholeOptimizer::isLoadFoldable(MachineInstr *MI,
                                       unsigned &FoldAsLoadDefReg) {
  // ...
  unsigned Reg = MI->getOperand(0).getReg();
  if (!MI->getOperand(0).getSubReg() &&
      TargetRegisterInfo::isVirtualRegister(Reg) &&
      MRI->hasOneUse(Reg)) {
    FoldAsLoadDefReg = Reg;
    return true;
  }
  return false;
}
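// A load is recorded in FoldAsLoadDefReg only when it defines a single-use
// virtual register with no sub-register; that single user is what
// TII->optimizeLoadInstr() later tries to fold the memory operand into (see
// runOnMachineFunction below).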
bool PeepholeOptimizer::isMoveImmediate(MachineInstr *MI,
                                        SmallSet<unsigned, 4> &ImmDefRegs,
                                 DenseMap<unsigned, MachineInstr*> &ImmDefMIs) {
  // ...
    ImmDefMIs.insert(std::make_pair(Reg, MI));
    ImmDefRegs.insert(Reg);
    return true;
  // ...
bool PeepholeOptimizer::foldImmediate(MachineInstr *MI, MachineBasicBlock *MBB,
                                      SmallSet<unsigned, 4> &ImmDefRegs,
                                 DenseMap<unsigned, MachineInstr*> &ImmDefMIs) {
  // ...
    unsigned Reg = MO.getReg();
    // ...
    if (ImmDefRegs.count(Reg) == 0)
      continue;
    DenseMap<unsigned, MachineInstr*>::iterator II = ImmDefMIs.find(Reg);
    assert(II != ImmDefMIs.end());
    if (TII->FoldImmediate(MI, II->second, Reg, MRI)) {
      ++NumImmFold;
      return true;
    }
  // ...
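// Illustrative sketch (hypothetical x86-style MIR) of the immediate folding
// driven by isMoveImmediate()/foldImmediate():
//
//   %c = MOV32ri 42
//   %x = ADD32rr %y, %c
//
// If the target's FoldImmediate() hook succeeds, this becomes
//
//   %x = ADD32ri %y, 42
//
// and the now-unused MOV can be cleaned up by later dead-code elimination.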
bool PeepholeOptimizer::runOnMachineFunction(MachineFunction &MF) {
  DEBUG(dbgs() << "********** PEEPHOLE OPTIMIZER **********\n");
  // ...
  TM  = &MF.getTarget();
  TII = TM->getInstrInfo();
  MRI = &MF.getRegInfo();
  DT  = Aggressive ? &getAnalysis<MachineDominatorTree>() : 0;

  bool Changed = false;
  // ...
  unsigned FoldAsLoadDefReg;
  for (MachineFunction::iterator I = MF.begin(), E = MF.end(); I != E; ++I) {
    MachineBasicBlock *MBB = &*I;

    bool SeenMoveImm = false;
    // ...
    FoldAsLoadDefReg = 0;

    for (MachineBasicBlock::iterator
           MII = I->begin(), MIE = I->end(); MII != MIE; ) {
      MachineInstr *MI = &*MII;
      ++MII;  // MI may be erased below.
      LocalMIs.insert(MI);

      // Instructions in these categories invalidate the load candidate.
      if (MI->isLabel() || MI->isPHI() || MI->isImplicitDef() ||
          MI->isKill() || MI->isInlineAsm() || MI->isDebugValue() ||
          MI->hasUnmodeledSideEffects()) {
        FoldAsLoadDefReg = 0;
        continue;
      }
      if (MI->mayStore() || MI->isCall())
        FoldAsLoadDefReg = 0;

      if (((MI->isBitcast() || MI->isCopy()) && optimizeCopyOrBitcast(MI)) ||
          (MI->isCompare() && optimizeCmpInstr(MI, MBB)) ||
          (MI->isSelect() && optimizeSelect(MI))) {
        // MI is deleted.
        LocalMIs.erase(MI);
        Changed = true;
        continue;
      }

      if (isMoveImmediate(MI, ImmDefRegs, ImmDefMIs)) {
        SeenMoveImm = true;
      } else {
        Changed |= optimizeExtInstr(MI, MBB, LocalMIs);
        // ...
        if (SeenMoveImm)
          Changed |= foldImmediate(MI, MBB, ImmDefRegs, ImmDefMIs);
      }

      if (!isLoadFoldable(MI, FoldAsLoadDefReg) && FoldAsLoadDefReg) {
        // ...
        MachineInstr *DefMI = 0;
        MachineInstr *FoldMI = TII->optimizeLoadInstr(MI, MRI,
                                                      FoldAsLoadDefReg, DefMI);
        if (FoldMI) {
          // Update LocalMIs: MI was replaced by FoldMI and DefMI was deleted.
          LocalMIs.erase(MI);
          LocalMIs.erase(DefMI);
          // ...
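// Illustrative sketch (hypothetical x86-style MIR) of the load folding
// performed by TII->optimizeLoadInstr() above: with FoldAsLoadDefReg set to %v,
//
//   %v = MOV32rm <mem>
//   %x = ADD32rr %y, %v
//
// collapses into a single memory-operand instruction
//
//   %x = ADD32rm %y, <mem>
//
// after which both the original instruction and the defining load are erased,
// which is why they are also dropped from LocalMIs.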