#define DEBUG_TYPE "aarch64-branch-fixup"
STATISTIC(NumSplit,    "Number of uncond branches inserted");
STATISTIC(NumCBrFixed, "Number of cond branches fixed");
/// UnknownPadding - Return the worst-case padding that could result from
/// unknown offset bits. This does not include alignment padding caused by
/// known offset bits.
static inline unsigned UnknownPadding(unsigned LogAlign, unsigned KnownBits) {
  if (KnownBits < LogAlign)
    return (1u << LogAlign) - (1u << KnownBits);
  return 0;
}
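// For example, with LogAlign = 3 (an 8-byte-aligned successor) and only one
// known low offset bit, the worst-case padding is (1 << 3) - (1 << 1) = 6
// bytes.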
/// BasicBlockInfo - Information about the offset and size of a single
/// basic block.
struct BasicBlockInfo {
  unsigned Offset;   // Distance from the start of the function, in bytes.
  unsigned Size;     // Size of the block in bytes (worst case for inline asm).
  uint8_t KnownBits; // Number of low bits of Offset known to be exact.
  uint8_t Unalign;   // Non-zero if the block's size is only a worst-case
                     // estimate because it contains inline asm.

  BasicBlockInfo() : Offset(0), Size(0), KnownBits(0), Unalign(0) {}
  /// internalKnownBits - Compute the number of known offset bits internally
  /// to this block; used to predict worst-case padding when splitting it.
  unsigned internalKnownBits() const {
    unsigned Bits = Unalign ? Unalign : KnownBits;
    // If the block size is not a multiple of 1 << Bits, assume worst-case
    // padding and only trust the bits implied by Size itself.
    if (Size & ((1u << Bits) - 1))
      Bits = countTrailingZeros(Size);
    return Bits;
  }
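  // E.g. Size = 0x14 with KnownBits = 3: 0x14 & 7 != 0, so only the low two
  // bits (the trailing zeros of 0x14) remain known.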
  /// postOffset - Compute the offset immediately following this block. If
  /// LogAlign is specified, return the offset the successor block will get
  /// if it has this alignment.
  unsigned postOffset(unsigned LogAlign = 0) const {
    unsigned PO = Offset + Size;
    if (!LogAlign)
      return PO;
    // Add worst-case alignment padding for an aligned successor.
    return PO + UnknownPadding(LogAlign, internalKnownBits());
  }
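  // E.g. with Offset = 0x100, Size = 0x14, and two known low offset bits,
  // postOffset(3) = 0x114 + UnknownPadding(3, 2) = 0x118: the worst-case
  // start of an 8-byte-aligned successor.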
  /// postKnownBits - Compute the number of known low bits of postOffset. If
  /// LogAlign is given, the alignment of the next block is also taken into
  /// account.
  unsigned postKnownBits(unsigned LogAlign = 0) const {
    return std::max(LogAlign, internalKnownBits());
  }
};
/// BBInfo - The per-basic-block layout information, indexed by block number.
std::vector<BasicBlockInfo> BBInfo;
/// ImmBranch - One per immediate branch: the branch instruction, the number
/// of usable offset bits in its immediate field, and whether it is
/// conditional.
struct ImmBranch {
  MachineInstr *MI;
  unsigned OffsetBits : 31;
  bool IsCond : 1;
  ImmBranch(MachineInstr *mi, unsigned offsetbits, bool cond)
    : MI(mi), OffsetBits(offsetbits), IsCond(cond) {}
};

/// ImmBranches - Keep track of all the immediate branch instructions.
std::vector<ImmBranch> ImmBranches;
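// OffsetBits counts usable bits of *byte* offset: the width of the
// instruction's immediate field plus 2, because AArch64 branch immediates are
// scaled by the 4-byte instruction size (e.g. 26 + 2 for an unconditional B,
// as used in fixupConditionalBr below).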
virtual const char *getPassName() const {
  return "AArch64 branch fixup pass";
}
void initializeFunctionInfo();
bool isBBInRange(MachineInstr *MI, MachineBasicBlock *DestBB,
                 unsigned OffsetBits);
bool fixupImmediateBr(ImmBranch &Br);
bool fixupConditionalBr(ImmBranch &Br);
// Block offsets must be monotone: a block never starts before the worst-case
// end of its layout predecessor.
assert(!MBBId || BBInfo[MBBId - 1].postOffset() <= BBInfo[MBBId].Offset);
/// dumpBBs - Print block size and offset information, for debugging.
void AArch64BranchFixup::dumpBBs() {
  for (unsigned J = 0, E = BBInfo.size(); J != E; ++J) {
    const BasicBlockInfo &BBI = BBInfo[J];
    dbgs() << format("%08x BB#%u\t", BBI.Offset, J)
           << " kb=" << unsigned(BBI.KnownBits)
           << format(" size=%#x\n", BBInfo[J].Size);
  }
}

/// createAArch64BranchFixupPass - Returns an instance of the branch fixup
/// pass.
FunctionPass *llvm::createAArch64BranchFixupPass() {
  return new AArch64BranchFixup();
}
bool AArch64BranchFixup::runOnMachineFunction(MachineFunction &mf) {
  MF = &mf;
  DEBUG(dbgs() << "***** AArch64BranchFixup ******");

  // This pass invalidates liveness information when it splits basic blocks.
  MF->getRegInfo().invalidateLiveness();

  // Renumber the basic blocks so that block numbers match layout order.
  MF->RenumberBlocks();

  // Do the initial scan of the function, recording block sizes and the
  // location of every immediate branch.
  initializeFunctionInfo();

  // Iteratively fix up branches until nothing changes.
  unsigned NoBRIters = 0;
  bool MadeChange = false;
  while (true) {
    DEBUG(dbgs() << "Beginning iteration #" << NoBRIters << '\n');
    bool BRChange = false;
    for (unsigned i = 0, e = ImmBranches.size(); i != e; ++i)
      BRChange |= fixupImmediateBr(ImmBranches[i]);
    if (BRChange && ++NoBRIters > 30)
      report_fatal_error("Branch Fix Up pass failed to converge!");
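    // Fixing one branch (by splitting a block) can push other branches out of
    // range, so every immediate branch is re-checked each iteration; the
    // iteration cap only guards against a pathological failure to converge.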
/// initializeFunctionInfo - Do the initial scan of the function, recording the
/// size of each basic block and the location of each immediate branch.
void AArch64BranchFixup::initializeFunctionInfo() {
  BBInfo.clear();
  BBInfo.resize(MF->getNumBlockIDs());

  // Measure every basic block first (see computeBlockSize below).
  // ...

  // The known bits of the entry block offset are determined by the function
  // alignment.
  BBInfo.front().KnownBits = MF->getAlignment();

  // Compute block offsets and known bits for the initial layout.
  adjustBBOffsetsAfter(MF->begin());
  // Scan all instructions, recording every immediate branch that may later
  // need fixing up.
  // ...
      if (I->isDebugValue())
        continue;

      int Opc = I->getOpcode();
      // ... (Bits and IsCond are set according to the branch opcode)
      switch (Opc) {
      // Test-and-branch instructions: 14-bit immediate, scaled by 4.
      case AArch64::TBZxii:
      case AArch64::TBZwii:
      case AArch64::TBNZxii:
      case AArch64::TBNZwii:
        // ...
      // ... (compare-and-branch, conditional and unconditional branch cases)
      }

      ImmBranches.push_back(ImmBranch(I, Bits, IsCond));
/// computeBlockSize - Compute the size of MBB and note whether it contains
/// instructions (inline asm) whose size is not known exactly.
void AArch64BranchFixup::computeBlockSize(MachineBasicBlock *MBB) {
  BasicBlockInfo &BBI = BBInfo[MBB->getNumber()];
  BBI.Size = 0;
  BBI.Unalign = 0;

  for (MachineBasicBlock::iterator I = MBB->begin(), E = MBB->end(); I != E;
       ++I) {
    BBI.Size += TII->getInstSizeInBytes(*I);
    // For inline asm, getInstSizeInBytes returns a conservative (worst-case)
    // estimate, so mark the block's size as inexact.
    if (I->isInlineAsm())
      BBI.Unalign = 2;
  }
}
/// getOffsetOf - Return the current offset of the specified machine
/// instruction from the start of the function. This offset changes as blocks
/// are split and branches inserted.
unsigned AArch64BranchFixup::getOffsetOf(MachineInstr *MI) const {
  MachineBasicBlock *MBB = MI->getParent();

  // The offset is the sum of the sizes of all blocks before MI's block, plus
  // MI's offset within that block.
  unsigned Offset = BBInfo[MBB->getNumber()].Offset;
  for (MachineBasicBlock::iterator I = MBB->begin(); &*I != MI; ++I) {
    assert(I != MBB->end() && "Didn't find MI in its own basic block?");
    Offset += TII->getInstSizeInBytes(*I);
  }
  return Offset;
}
/// splitBlockBeforeInstr - Split the basic block containing MI into two
/// blocks, update the layout data structures, and renumber the blocks;
/// returns the newly created block.
MachineBasicBlock *
AArch64BranchFixup::splitBlockBeforeInstr(MachineInstr *MI) {
  // ... (move the instructions from MI onward into a fresh block, NewBB,
  //      leaving the first half as OrigBB, and fix up the successor lists)
  MF->RenumberBlocks(NewBB);

  // Insert an entry into BBInfo to align it properly with the (newly
  // renumbered) block numbers.
  BBInfo.insert(BBInfo.begin() + NewBB->getNumber(), BasicBlockInfo());

  // Recompute the sizes of both halves of the original block.
  computeBlockSize(OrigBB);
  computeBlockSize(NewBB);

  // All block offsets after the split point must be adjusted.
  adjustBBOffsetsAfter(OrigBB);
  return NewBB;
}
void AArch64BranchFixup::adjustBBOffsetsAfter(MachineBasicBlock *BB) {
  unsigned BBNum = BB->getNumber();
  for (unsigned i = BBNum + 1, e = MF->getNumBlockIDs(); i < e; ++i) {
    // Get the offset and known bits at the end of the layout predecessor,
    // including the alignment of the current block.
    unsigned LogAlign = MF->getBlockNumbered(i)->getAlignment();
    unsigned Offset = BBInfo[i - 1].postOffset(LogAlign);
    unsigned KnownBits = BBInfo[i - 1].postKnownBits(LogAlign);

    // Stop once the offsets are already correct and at least two blocks past
    // the change have been revisited.
    if (i > BBNum + 2 &&
        BBInfo[i].Offset == Offset &&
        BBInfo[i].KnownBits == KnownBits)
      break;

    BBInfo[i].Offset = Offset;
    BBInfo[i].KnownBits = KnownBits;
  }
}
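// Because postOffset() folds in worst-case padding before aligned blocks, the
// recorded offsets are conservative: subtracting two block offsets may
// overestimate, but never underestimate, the real distance between them.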
/// isBBInRange - Returns true if the distance between MI and the destination
/// block fits in MI's displacement field.
bool AArch64BranchFixup::isBBInRange(MachineInstr *MI,
                                     MachineBasicBlock *DestBB,
                                     unsigned OffsetBits) {
  int64_t BrOffset   = getOffsetOf(MI);
  int64_t DestOffset = BBInfo[DestBB->getNumber()].Offset;

  DEBUG(dbgs() << " bits available=" << OffsetBits
               << " from " << getOffsetOf(MI) << " to " << DestOffset
               << " offset " << int(DestOffset - BrOffset) << "\t" << *MI);

  return isIntN(OffsetBits, DestOffset - BrOffset);
}
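// isIntN(N, x) is true exactly when x fits in an N-bit signed integer, i.e.
// -2^(N-1) <= x < 2^(N-1); OffsetBits is therefore the usable width of the
// branch's signed byte offset.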
/// fixupImmediateBr - Fix up an immediate branch whose destination is too far
/// away to fit in its displacement field.
bool AArch64BranchFixup::fixupImmediateBr(ImmBranch &Br) {
  MachineInstr *MI = Br.MI;
  MachineBasicBlock *DestBB = 0;
  // Find the branch's destination: its (unique) basic-block operand.
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    if (MI->getOperand(i).isMBB()) {
      DestBB = MI->getOperand(i).getMBB();
      break;
    }
  }
  assert(DestBB && "Branch with no destination BB?");

  // Nothing to do if the destination is already in range.
  if (isBBInRange(MI, DestBB, Br.OffsetBits))
    return false;

  assert(Br.IsCond && "Only conditional branches should need fixup");
  return fixupConditionalBr(Br);
}
/// fixupConditionalBr - Fix up a conditional branch whose destination is too
/// far away to fit in its displacement field: invert the condition so it
/// jumps over an unconditional branch to the real destination.
bool
AArch64BranchFixup::fixupConditionalBr(ImmBranch &Br) {
  MachineInstr *MI = Br.MI;
  MachineBasicBlock *MBB = MI->getParent();
  unsigned CondBrMBBOperand = 0;

  // First invert the conditional branch, creating a replacement instruction
  // where a simple in-place tweak is not possible.
  if (MI->getOpcode() == AArch64::Bcc) {
    // Bcc keeps its destination block in operand 1; only the condition-code
    // operand needs inverting.
    CondBrMBBOperand = 1;
    // ... (invert the A64CC condition code in place)
  } else {
    // Compare-and-branch and test-and-branch instructions are inverted by
    // rebuilding them with the opposite opcode.
    int InvertedOpcode;
    switch (MI->getOpcode()) {
    default: llvm_unreachable("Unknown branch type");
    case AArch64::TBZxii:  InvertedOpcode = AArch64::TBNZxii; break;
    case AArch64::TBZwii:  InvertedOpcode = AArch64::TBNZwii; break;
    case AArch64::TBNZxii: InvertedOpcode = AArch64::TBZxii;  break;
    case AArch64::TBNZwii: InvertedOpcode = AArch64::TBZwii;  break;
    case AArch64::CBZx:    InvertedOpcode = AArch64::CBNZx;   break;
    case AArch64::CBZw:    InvertedOpcode = AArch64::CBNZw;   break;
    case AArch64::CBNZx:   InvertedOpcode = AArch64::CBZx;    break;
    case AArch64::CBNZw:   InvertedOpcode = AArch64::CBZw;    break;
    }

    // Build the inverted branch, copying the operands of the original and
    // remembering which one is the destination block.
    // ...
      if (MI->getOperand(i).isMBB())
        CondBrMBBOperand = i;
    // ...
    MI = Br.MI = InvertedMI;
  }
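  // After inversion, the rest of the function arranges the classic pattern;
  // e.g. a test-bit branch whose target is out of range becomes:
  //
  //     tbz  x1, #3, FarAway       tbnz x1, #3, Fallthrough
  //                            =>  b    FarAway
  //                                Fallthrough: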
  // If the block already ends in an unconditional branch whose own target
  // (NewDest) is within range of the inverted condition, simply swap the two
  // destinations instead of splitting the block.
  // ...
      if (isBBInRange(MI, NewDest, Br.OffsetBits)) {
        DEBUG(dbgs() << "  Invert Bcc condition and swap its destination with "
                     << *BMI);
        // ... (swap the two destination operands and return)
      }

  // Otherwise, split the block just after the inverted conditional branch (at
  // MBBI) so the inverted branch can fall through into the remainder.
  // ...
    splitBlockBeforeInstr(MBBI);
    // The branch to the next block created by the split is not needed, since
    // an unconditional branch to the real destination is added instead;
    // remember its size so the block size can be corrected when it is removed.
    int delta = TII->getInstSizeInBytes(MBB->back());
    // ...

  // ...
  DEBUG(dbgs() << "  Insert B to BB#" << DestBB->getNumber()
               << " also invert condition and change dest. to BB#"
               << NextBB->getNumber() << "\n");
  // Record the new unconditional branch so later iterations can verify (and,
  // if necessary, fix) its range too.
  unsigned OffsetBits = 26 + 2;
  ImmBranches.push_back(ImmBranch(&MBB->back(), OffsetBits, false));
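  // The 26-bit immediate of an unconditional B is scaled by the 4-byte
  // instruction size, giving a signed 28-bit byte offset (roughly +/-128 MiB),
  // so this new branch is very unlikely to need fixing itself.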
  // Finally, bring the layout information back up to date.
  adjustBBOffsetsAfter(MBB);
  return true;
}