16 #define DEBUG_TYPE "post-RA-sched"
33 TII(MF.getTarget().getInstrInfo()),
34 TRI(MF.getTarget().getRegisterInfo()),
37 KillIndices(TRI->getNumRegs(), 0),
38 DefIndices(TRI->getNumRegs(), 0),
39 KeepRegs(TRI->getNumRegs(),
false) {}
45 const unsigned BBSize = BB->
size();
46 for (
unsigned i = 0, e = TRI->
getNumRegs(); i != e; ++i) {
52 DefIndices[i] = BBSize;
58 bool IsReturnBlock = (BBSize != 0 && BB->
back().
isReturn());
64 E = (*SI)->livein_end();
I != E; ++
I) {
68 KillIndices[
Reg] = BBSize;
69 DefIndices[
Reg] = ~0u;
79 if (!IsReturnBlock && !Pristine.
test(*
I))
continue;
83 KillIndices[
Reg] = BBSize;
84 DefIndices[
Reg] = ~0u;
95 unsigned InsertPosIndex) {
98 assert(Count < InsertPosIndex &&
"Instruction index out of expected range!");
101 if (KillIndices[
Reg] != ~0u) {
106 KillIndices[
Reg] = Count;
107 }
else if (DefIndices[
Reg] < InsertPosIndex && DefIndices[
Reg] >= Count) {
116 DefIndices[
Reg] = InsertPosIndex;
120 PrescanInstruction(MI);
121 ScanInstruction(MI, Count);
127 const SDep *Next = 0;
128 unsigned NextDepth = 0;
132 const SUnit *PredSU =
P->getSUnit();
133 unsigned PredLatency =
P->getLatency();
134 unsigned PredTotalLatency = PredSU->
getDepth() + PredLatency;
137 if (NextDepth < PredTotalLatency ||
138 (NextDepth == PredTotalLatency &&
P->getKind() ==
SDep::Anti)) {
139 NextDepth = PredTotalLatency;
146 void CriticalAntiDepBreaker::PrescanInstruction(
MachineInstr *
MI) {
163 bool Special = MI->
isCall() ||
171 if (!MO.
isReg())
continue;
173 if (Reg == 0)
continue;
176 if (i < MI->getDesc().getNumOperands())
181 if (!Classes[Reg] && NewRC)
182 Classes[
Reg] = NewRC;
183 else if (!NewRC || Classes[Reg] != NewRC)
191 unsigned AliasReg = *AI;
192 if (Classes[AliasReg]) {
199 if (Classes[Reg] != reinterpret_cast<TargetRegisterClass *>(-1))
200 RegRefs.insert(std::make_pair(Reg, &MO));
202 if (MO.
isUse() && Special) {
203 if (!KeepRegs.
test(Reg)) {
206 KeepRegs.
set(*SubRegs);
212 void CriticalAntiDepBreaker::ScanInstruction(
MachineInstr *MI,
225 for (
unsigned i = 0, e = TRI->getNumRegs(); i != e; ++i)
227 DefIndices[i] = Count;
228 KillIndices[i] = ~0u;
234 if (!MO.
isReg())
continue;
235 unsigned Reg = MO.
getReg();
236 if (Reg == 0)
continue;
237 if (!MO.
isDef())
continue;
241 DefIndices[
Reg] = Count;
242 KillIndices[
Reg] = ~0u;
243 assert(((KillIndices[Reg] == ~0u) !=
244 (DefIndices[Reg] == ~0u)) &&
245 "Kill and Def maps aren't consistent for Reg!");
251 unsigned SubregReg = *SubRegs;
252 DefIndices[SubregReg] = Count;
253 KillIndices[SubregReg] = ~0u;
254 KeepRegs.
reset(SubregReg);
255 Classes[SubregReg] = 0;
256 RegRefs.erase(SubregReg);
260 Classes[*SR] = reinterpret_cast<TargetRegisterClass *>(-1);
265 if (!MO.
isReg())
continue;
266 unsigned Reg = MO.
getReg();
267 if (Reg == 0)
continue;
268 if (!MO.
isUse())
continue;
271 if (i < MI->getDesc().getNumOperands())
276 if (!Classes[Reg] && NewRC)
277 Classes[
Reg] = NewRC;
278 else if (!NewRC || Classes[Reg] != NewRC)
281 RegRefs.insert(std::make_pair(Reg, &MO));
284 if (KillIndices[Reg] == ~0u) {
285 KillIndices[
Reg] = Count;
286 DefIndices[
Reg] = ~0u;
287 assert(((KillIndices[Reg] == ~0u) !=
288 (DefIndices[Reg] == ~0u)) &&
289 "Kill and Def maps aren't consistent for Reg!");
293 unsigned AliasReg = *AI;
294 if (KillIndices[AliasReg] == ~0u) {
295 KillIndices[AliasReg] = Count;
296 DefIndices[AliasReg] = ~0u;
314 CriticalAntiDepBreaker::isNewRegClobberedByRefs(RegRefIter RegRefBegin,
315 RegRefIter RegRefEnd,
318 for (RegRefIter
I = RegRefBegin;
I != RegRefEnd; ++
I ) {
335 if (!CheckOper.
isReg() || !CheckOper.
isDef() ||
336 CheckOper.
getReg() != NewReg)
341 if (RefOper->
isDef())
358 unsigned CriticalAntiDepBreaker::
359 findSuitableFreeRegister(RegRefIter RegRefBegin,
360 RegRefIter RegRefEnd,
367 for (
unsigned i = 0; i != Order.
size(); ++i) {
368 unsigned NewReg = Order[i];
370 if (NewReg == AntiDepReg)
continue;
374 if (NewReg == LastNewReg)
continue;
378 if (isNewRegClobberedByRefs(RegRefBegin, RegRefEnd, NewReg))
continue;
381 assert(((KillIndices[AntiDepReg] == ~0u) != (DefIndices[AntiDepReg] == ~0u))
382 &&
"Kill and Def maps aren't consistent for AntiDepReg!");
383 assert(((KillIndices[NewReg] == ~0u) != (DefIndices[NewReg] == ~0u))
384 &&
"Kill and Def maps aren't consistent for NewReg!");
385 if (KillIndices[NewReg] != ~0u ||
386 Classes[NewReg] == reinterpret_cast<TargetRegisterClass *>(-1) ||
387 KillIndices[AntiDepReg] > DefIndices[NewReg])
390 bool Forbidden =
false;
392 ite = Forbid.
end(); it != ite; ++it)
393 if (TRI->regsOverlap(NewReg, *it)) {
397 if (Forbidden)
continue;
409 unsigned InsertPosIndex,
413 if (SUnits.empty())
return 0;
422 const SUnit *Max = 0;
423 for (
unsigned i = 0, e = SUnits.size(); i != e; ++i) {
424 const SUnit *SU = &SUnits[i];
432 DEBUG(
dbgs() <<
"Critical path has total latency "
435 for (
unsigned Reg = 0; Reg < TRI->getNumRegs(); ++
Reg) {
436 if (KillIndices[Reg] == ~0u)
437 DEBUG(
dbgs() <<
" " << TRI->getName(Reg));
445 const SUnit *CriticalPathSU = Max;
489 std::vector<unsigned> LastNewReg(TRI->getNumRegs(), 0);
495 unsigned Count = InsertPosIndex - 1;
515 unsigned AntiDepReg = 0;
516 if (MI == CriticalPathMI) {
518 const SUnit *NextSU = Edge->getSUnit();
522 AntiDepReg = Edge->getReg();
523 assert(AntiDepReg != 0 &&
"Anti-dependence on reg0?");
527 else if (KeepRegs.
test(AntiDepReg))
541 PE = CriticalPathSU->
Preds.end();
P != PE; ++
P)
542 if (
P->getSUnit() == NextSU ?
543 (
P->getKind() !=
SDep::Anti ||
P->getReg() != AntiDepReg) :
544 (
P->getKind() ==
SDep::Data &&
P->getReg() == AntiDepReg)) {
550 CriticalPathSU = NextSU;
551 CriticalPathMI = CriticalPathSU->
getInstr();
559 PrescanInstruction(MI);
571 else if (AntiDepReg) {
578 if (!MO.
isReg())
continue;
579 unsigned Reg = MO.
getReg();
580 if (Reg == 0)
continue;
581 if (MO.
isUse() && TRI->regsOverlap(AntiDepReg, Reg)) {
585 if (MO.
isDef() && Reg != AntiDepReg)
593 assert((AntiDepReg == 0 || RC != NULL) &&
594 "Register should be live if it's causing an anti-dependence!");
595 if (RC == reinterpret_cast<TargetRegisterClass *>(-1))
602 if (AntiDepReg != 0) {
603 std::pair<std::multimap<unsigned, MachineOperand *>::iterator,
604 std::multimap<unsigned, MachineOperand *>::iterator>
605 Range = RegRefs.equal_range(AntiDepReg);
606 if (
unsigned NewReg = findSuitableFreeRegister(Range.first, Range.second,
608 LastNewReg[AntiDepReg],
610 DEBUG(
dbgs() <<
"Breaking anti-dependence edge on "
611 << TRI->getName(AntiDepReg)
612 <<
" with " << RegRefs.count(AntiDepReg) <<
" references"
613 <<
" using " << TRI->getName(NewReg) <<
"!\n");
617 for (std::multimap<unsigned, MachineOperand *>::iterator
618 Q = Range.first, QE = Range.second; Q != QE; ++Q) {
619 Q->second->setReg(NewReg);
623 const SUnit *SU = MISUnitMap[Q->second->getParent()];
625 for (DbgValueVector::iterator DVI = DbgValues.begin(),
626 DVE = DbgValues.end(); DVI != DVE; ++DVI)
627 if (DVI->second == Q->second->getParent())
634 Classes[NewReg] = Classes[AntiDepReg];
635 DefIndices[NewReg] = DefIndices[AntiDepReg];
636 KillIndices[NewReg] = KillIndices[AntiDepReg];
637 assert(((KillIndices[NewReg] == ~0u) !=
638 (DefIndices[NewReg] == ~0u)) &&
639 "Kill and Def maps aren't consistent for NewReg!");
641 Classes[AntiDepReg] = 0;
642 DefIndices[AntiDepReg] = KillIndices[AntiDepReg];
643 KillIndices[AntiDepReg] = ~0u;
644 assert(((KillIndices[AntiDepReg] == ~0u) !=
645 (DefIndices[AntiDepReg] == ~0u)) &&
646 "Kill and Def maps aren't consistent for AntiDepReg!");
648 RegRefs.erase(AntiDepReg);
649 LastNewReg[AntiDepReg] = NewReg;
654 ScanInstruction(MI, Count);
void push_back(const T &Elt)
MachineInstr * getParent()
bool isValid() const
isValid - returns true if this iterator is not yet at the end.
void Observe(MachineInstr *MI, unsigned Count, unsigned InsertPosIndex)
void UpdateDbgValue(MachineInstr *MI, unsigned OldReg, unsigned NewReg)
std::vector< unsigned >::const_iterator livein_iterator
MachineInstr * getInstr() const
const MCInstrDesc & getDesc() const
void FinishBlock()
Finish - Finish anti-dep breaking for a basic block.
SmallVector< SDep, 4 > Preds
A register anti-dependence (aka WAR).
virtual const MCPhysReg * getCalleeSavedRegs(const MachineFunction *MF=0) const =0
const HexagonInstrInfo * TII
bool isReg() const
isReg - Tests if this is a MO_Register operand.
Regular data dependence (aka true-dependence).
std::vector< MachineBasicBlock * >::iterator succ_iterator
Abstract Stack Frame Information.
ArrayRef< MCPhysReg > getOrder(const TargetRegisterClass *RC) const
unsigned getNumOperands() const
void StartBlock(MachineBasicBlock *BB)
Start - Initialize anti-dep breaking for a new basic block.
unsigned getNumRegs() const
Return the number of registers this target has (useful for sizing arrays holding per-register information).
unsigned BreakAntiDependencies(const std::vector< SUnit > &SUnits, MachineBasicBlock::iterator Begin, MachineBasicBlock::iterator End, unsigned InsertPosIndex, DbgValueVector &DbgValues)
bool hasExtraSrcRegAllocReq(QueryType Type=AnyInBundle) const
size_t size() const
size - Get the array size.
bool isDebugValue() const
bool isEarlyClobber() const
bundle_iterator< MachineInstr, instr_iterator > iterator
bool isReturn(QueryType Type=AnyInBundle) const
const MachineOperand & getOperand(unsigned i) const
CriticalAntiDepBreaker(MachineFunction &MFi, const RegisterClassInfo &)
succ_iterator succ_begin()
bool isRegMask() const
isRegMask - Tests if this is a MO_RegisterMask operand.
BitVector getPristineRegs(const MachineBasicBlock *MBB) const
bool test(unsigned Idx) const
bool hasExtraDefRegAllocReq(QueryType Type=AnyInBundle) const
bool isAllocatable(unsigned PhysReg) const
MachineFrameInfo * getFrameInfo()
raw_ostream & dbgs()
dbgs - Return a circular-buffered debug stream.
static bool clobbersPhysReg(const uint32_t *RegMask, unsigned PhysReg)
unsigned getDepth() const
bool isCall(QueryType Type=AnyInBundle) const
static const SDep * CriticalPathStep(const SUnit *SU)
~CriticalAntiDepBreaker()
unsigned getReg() const
getReg - Returns the register number.
std::vector< std::pair< MachineInstr *, MachineInstr * > > DbgValueVector
virtual bool isPredicated(const MachineInstr *MI) const
const TargetRegisterClass * getRegClass(const MCInstrDesc &TID, unsigned OpNum, const TargetRegisterInfo *TRI, const MachineFunction &MF) const
const MCRegisterInfo & MRI
bool isRegTiedToUseOperand(unsigned DefOpIdx, unsigned *UseOpIdx=0) const
SUnit - Scheduling unit. This is a node in the scheduling DAG.