#define DEBUG_TYPE "regalloc"

STATISTIC(NumStores, "Number of stores added");
STATISTIC(NumLoads,  "Number of loads added");
STATISTIC(NumCopies, "Number of copies coalesced");

    RAFast() : MachineFunctionPass(ID), StackSlotForVirtReg(-1),
               isBulkSpilling(false) {}
      unsigned short LastOpNum;

      explicit LiveReg(unsigned v)
        : LastUse(0), VirtReg(v), PhysReg(0), LastOpNum(0), Dirty(false) {}

      unsigned getSparseSetIndex() const {
        return TargetRegisterInfo::virtReg2Index(VirtReg);
      }
    LiveRegMap LiveVirtRegs;

    std::vector<unsigned> PhysRegState;

    UsedInInstrSet UsedInInstr;
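    // LiveVirtRegs maps each live virtual register to its LiveReg entry;
    // PhysRegState records, for every physical register, whether it is
    // disabled, free, reserved, or currently holding a virtual register.
    // UsedInInstr collects the register units already used by the
    // instruction currently being allocated.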
    void markRegUsedInInstr(unsigned PhysReg) {
      for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units)
        UsedInInstr.insert(*Units);
    }

    bool isRegUsedInInstr(unsigned PhysReg) const {
      for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units)
        if (UsedInInstr.count(*Units))
          return true;
      return false;
    }
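    // Spill cost bounds used by calcSpillCost below; spillImpossible marks a
    // physical register that cannot be freed for the current instruction.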
    spillImpossible = ~0u

    virtual const char *getPassName() const {
      return "Fast Register Allocator";
    }
    void AllocateBasicBlock();

    void addKillFlag(const LiveReg&);
    void killVirtReg(LiveRegMap::iterator);
    void killVirtReg(unsigned VirtReg);

    void definePhysReg(MachineInstr *MI, unsigned PhysReg, RegState NewState);
    unsigned calcSpillCost(unsigned PhysReg) const;
    void assignVirtToPhysReg(LiveReg&, unsigned PhysReg);
    LiveRegMap::iterator findLiveVirtReg(unsigned VirtReg) {
      return LiveVirtRegs.find(TargetRegisterInfo::virtReg2Index(VirtReg));
    }
    LiveRegMap::const_iterator findLiveVirtReg(unsigned VirtReg) const {
      return LiveVirtRegs.find(TargetRegisterInfo::virtReg2Index(VirtReg));
    }
    LiveRegMap::iterator assignVirtToPhysReg(unsigned VReg, unsigned PhysReg);
    LiveRegMap::iterator allocVirtReg(MachineInstr *MI, LiveRegMap::iterator,
                                      unsigned Hint);
    LiveRegMap::iterator defineVirtReg(MachineInstr *MI, unsigned OpNum,
                                       unsigned VirtReg, unsigned Hint);
    LiveRegMap::iterator reloadVirtReg(MachineInstr *MI, unsigned OpNum,
                                       unsigned VirtReg, unsigned Hint);

    bool setPhysReg(MachineInstr *MI, unsigned OpNum, unsigned PhysReg);
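// getStackSpaceFor - return the spill slot already assigned to VirtReg, or
// create a new spill stack object for its register class and remember it.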
  int SS = StackSlotForVirtReg[VirtReg];

  int FrameIdx = MF->getFrameInfo()->CreateSpillStackObject(RC->getSize(),
                                                            RC->getAlignment());
  StackSlotForVirtReg[VirtReg] = FrameIdx;
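// isLastUseOfLocalReg - return true when MO is the only remaining non-debug
// reference to its virtual register and the register has never been spilled,
// i.e. it is block-local.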
  if (StackSlotForVirtReg[MO.getReg()] != -1)
    return false;

  return ++I == MRI->reg_nodbg_end();
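// addKillFlag - set a kill flag on the last use of LR's physical register,
// unless that use is tied to a def in the same instruction.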
void RAFast::addKillFlag(const LiveReg &LR) {
  if (!LR.LastUse) return;
  MachineOperand &MO = LR.LastUse->getOperand(LR.LastOpNum);
  if (MO.isUse() && !LR.LastUse->isRegTiedToDefOperand(LR.LastOpNum)) {
    if (MO.getReg() == LR.PhysReg)
      MO.setIsKill();
    else
      LR.LastUse->addRegisterKilled(LR.PhysReg, TRI, true);
  }
}
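// killVirtReg - remove a virtual register from the live map and mark its
// physical register as free; the iterator form does the work, the unsigned
// form looks the register up first.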
void RAFast::killVirtReg(LiveRegMap::iterator LRI) {
  assert(PhysRegState[LRI->PhysReg] == LRI->VirtReg &&
         "Broken RegState mapping");
  PhysRegState[LRI->PhysReg] = regFree;

  LiveVirtRegs.erase(LRI);
void RAFast::killVirtReg(unsigned VirtReg) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "killVirtReg needs a virtual register");
  LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
  if (LRI != LiveVirtRegs.end())
    killVirtReg(LRI);
268 "Spilling a physical register is illegal!");
269 LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
270 assert(LRI != LiveVirtRegs.end() &&
"Spilling unmapped virtual register");
271 spillVirtReg(MI, LRI);
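// spillVirtReg - write the register back to its stack slot if it is dirty and
// update any DBG_VALUE instructions that refer to it.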
void RAFast::spillVirtReg(MachineBasicBlock::iterator MI,
                          LiveRegMap::iterator LRI) {
  assert(PhysRegState[LR.PhysReg] == LRI->VirtReg &&
         "Broken RegState mapping");

  bool SpillKill = LR.LastUse != MI;
               << " in " << PrintReg(LR.PhysReg, TRI));
  int FI = getStackSpaceFor(LRI->VirtReg, RC);
  DEBUG(dbgs() << " to stack slot #" << FI << "\n");

    LiveDbgValueMap[LRI->VirtReg];
  for (unsigned li = 0, le = LRIDbgValues.size(); li != le; ++li) {
    if (MI == MBB->end()) {
      DL = (--EI)->getDebugLoc();
      DL = MI->getDebugLoc();
    DEBUG(dbgs() << "Inserting debug info due to spill:" << "\n" << *NewDV);
  LRIDbgValues.clear();
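// Bulk spilling: spill every virtual register that is still live, then clear
// the live-register map; this is done before calls and at the end of a block.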
  if (LiveVirtRegs.empty()) return;
  isBulkSpilling = true;

  for (LiveRegMap::iterator i = LiveVirtRegs.begin(), e = LiveVirtRegs.end();
       i != e; ++i)
    spillVirtReg(MI, i);
  LiveVirtRegs.clear();
  isBulkSpilling = false;
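// A use of a physical register operand: mark its register units as used in
// the current instruction and bring the register and any aliases it overlaps
// into a consistent free/reserved state.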
  unsigned PhysReg = MO.getReg();
  assert(TargetRegisterInfo::isPhysicalRegister(PhysReg) &&
         "Bad usePhysReg operand");

  markRegUsedInInstr(PhysReg);
  switch (PhysRegState[PhysReg]) {
    PhysRegState[PhysReg] = regFree;

    unsigned Alias = *AI;
    switch (PhysRegState[Alias]) {
      assert(TRI->isSuperRegister(PhysReg, Alias) &&
             "Instruction is not using a subregister of a reserved register");
      PhysRegState[Alias] = regFree;
      if (TRI->isSuperRegister(PhysReg, Alias)) {
        PhysRegState[Alias] = regDisabled;

  PhysRegState[PhysReg] = regFree;
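// definePhysReg - mark PhysReg as free or reserved for an instruction that
// defines it directly, spilling any virtual register that currently occupies
// it or one of its aliases.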
void RAFast::definePhysReg(MachineInstr *MI, unsigned PhysReg,
                           RegState NewState) {
  markRegUsedInInstr(PhysReg);
  switch (unsigned VirtReg = PhysRegState[PhysReg]) {
    spillVirtReg(MI, VirtReg);
    PhysRegState[PhysReg] = NewState;

  PhysRegState[PhysReg] = NewState;
    unsigned Alias = *AI;
    switch (unsigned VirtReg = PhysRegState[Alias]) {
      spillVirtReg(MI, VirtReg);
      PhysRegState[Alias] = regDisabled;
      if (TRI->isSuperRegister(PhysReg, Alias))
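// calcSpillCost - estimate how expensive it would be to evict whatever is in
// PhysReg and its aliases: spillClean or spillDirty when a live virtual
// register would have to be spilled, spillImpossible when the register
// cannot be freed at all.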
unsigned RAFast::calcSpillCost(unsigned PhysReg) const {
  if (isRegUsedInInstr(PhysReg)) {
    return spillImpossible;

  switch (unsigned VirtReg = PhysRegState[PhysReg]) {
                 << PrintReg(PhysReg, TRI) << " is reserved already.\n");
    return spillImpossible;

    LiveRegMap::const_iterator I = findLiveVirtReg(VirtReg);
    assert(I != LiveVirtRegs.end() && "Missing VirtReg entry");
    return I->Dirty ? spillDirty : spillClean;

    unsigned Alias = *AI;
    switch (unsigned VirtReg = PhysRegState[Alias]) {
      return spillImpossible;

      LiveRegMap::const_iterator I = findLiveVirtReg(VirtReg);
      assert(I != LiveVirtRegs.end() && "Missing VirtReg entry");
      Cost += I->Dirty ? spillDirty : spillClean;
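// assignVirtToPhysReg - record the assignment in both directions:
// PhysRegState[PhysReg] gets the virtual register and the LiveReg entry gets
// the physical register.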
void RAFast::assignVirtToPhysReg(LiveReg &LR, unsigned PhysReg) {
  PhysRegState[PhysReg] = LR.VirtReg;
  assert(!LR.PhysReg && "Already assigned a physreg");
  LR.PhysReg = PhysReg;
RAFast::LiveRegMap::iterator
RAFast::assignVirtToPhysReg(unsigned VirtReg, unsigned PhysReg) {
  LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
  assert(LRI != LiveVirtRegs.end() && "VirtReg disappeared");
  assignVirtToPhysReg(*LRI, PhysReg);
  return LRI;
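// allocVirtReg - pick a physical register for VirtReg: prefer the hint if it
// is cheap enough, then any free register in the allocation order, otherwise
// evict the candidate with the lowest spill cost.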
RAFast::LiveRegMap::iterator RAFast::allocVirtReg(MachineInstr *MI,
                                                  LiveRegMap::iterator LRI,
                                                  unsigned Hint) {
  const unsigned VirtReg = LRI->VirtReg;

  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Can only allocate virtual registers");

    unsigned Cost = calcSpillCost(Hint);
    if (Cost < spillDirty) {
        definePhysReg(MI, Hint, regFree);
      return assignVirtToPhysReg(VirtReg, Hint);

    unsigned PhysReg = *I;
    if (PhysRegState[PhysReg] == regFree && !isRegUsedInInstr(PhysReg)) {
      assignVirtToPhysReg(*LRI, PhysReg);

  unsigned BestReg = 0, BestCost = spillImpossible;
    unsigned Cost = calcSpillCost(*I);
    DEBUG(dbgs() << "\tCost: " << Cost << "\n");
    DEBUG(dbgs() << "\tBestCost: " << BestCost << "\n");
      assignVirtToPhysReg(*LRI, *I);
      BestReg = *I, BestCost = Cost;

    definePhysReg(MI, BestReg, regFree);
    return assignVirtToPhysReg(VirtReg, BestReg);

  if (MI->isInlineAsm())
    MI->emitError("inline assembly requires more registers than available");
  else
    MI->emitError("ran out of registers during register allocation");
  definePhysReg(MI, *AO.begin(), regFree);
  return assignVirtToPhysReg(VirtReg, *AO.begin());
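// defineVirtReg - map VirtReg to a physical register for the def at operand
// OpNum, allocating one if the register is not live yet.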
RAFast::LiveRegMap::iterator
RAFast::defineVirtReg(MachineInstr *MI, unsigned OpNum,
                      unsigned VirtReg, unsigned Hint) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Not a virtual register");
  LiveRegMap::iterator LRI;
  bool New;
  tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));

        MRI->hasOneNonDBGUse(VirtReg)) {
    LRI = allocVirtReg(MI, LRI, Hint);
  } else if (LRI->LastUse) {
    if (LRI->LastUse != MI || LRI->LastUse->getOperand(LRI->LastOpNum).isUse())

  assert(LRI->PhysReg && "Register not assigned");
  LRI->LastOpNum = OpNum;
  markRegUsedInInstr(LRI->PhysReg);
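// reloadVirtReg - make sure VirtReg is live in a physical register for the
// use at operand OpNum, reloading it from its stack slot if necessary and
// clearing stale kill/dead flags on the operand.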
RAFast::LiveRegMap::iterator
RAFast::reloadVirtReg(MachineInstr *MI, unsigned OpNum,
                      unsigned VirtReg, unsigned Hint) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Not a virtual register");
  LiveRegMap::iterator LRI;
  bool New;
  tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));

    LRI = allocVirtReg(MI, LRI, Hint);
    int FrameIndex = getStackSpaceFor(VirtReg, RC);
                 << PrintReg(LRI->PhysReg, TRI) << "\n");
  } else if (LRI->Dirty) {
    if (isLastUseOfLocalReg(MO)) {
      DEBUG(dbgs() << "Killing last use: " << MO << "\n");
      DEBUG(dbgs() << "Clearing dubious kill: " << MO << "\n");
      DEBUG(dbgs() << "Clearing dubious dead: " << MO << "\n");
    DEBUG(dbgs() << "Clearing clean kill: " << MO << "\n");
    DEBUG(dbgs() << "Clearing clean dead: " << MO << "\n");

  assert(LRI->PhysReg && "Register not assigned");
  LRI->LastOpNum = OpNum;
  markRegUsedInInstr(LRI->PhysReg);
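// setPhysReg - rewrite operand OpNum of MI to refer to PhysReg; returns true
// if the operand kills its register.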
bool RAFast::setPhysReg(MachineInstr *MI, unsigned OpNum, unsigned PhysReg) {
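// handleThroughOperands - pre-allocate registers for instructions with tied
// operands, early-clobber defs, or partial redefines, so that uses and defs
// cannot end up assigned to conflicting physical registers.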
  DEBUG(dbgs() << "Scanning for through registers:");
    if (!MO.isReg()) continue;
      if (ThroughRegs.insert(Reg))

  DEBUG(dbgs() << "\nChecking for physdef collisions.\n");
    unsigned Reg = MO.getReg();
    markRegUsedInInstr(Reg);
      if (ThroughRegs.count(PhysRegState[*AI]))
        definePhysReg(MI, *AI, regFree);

  DEBUG(dbgs() << "Allocating tied uses.\n");
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
      DEBUG(dbgs() << "Operand " << i << "(" << MO << ") is tied to operand "
      LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, 0);
      unsigned PhysReg = LRI->PhysReg;
      setPhysReg(MI, i, PhysReg);

      DEBUG(dbgs() << "Partial redefine: " << MO << "\n");
      LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, 0);

  DEBUG(dbgs() << "Allocating early clobbers.\n");
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    LiveRegMap::iterator LRI = defineVirtReg(MI, i, Reg, 0);
    unsigned PhysReg = LRI->PhysReg;
    if (setPhysReg(MI, i, PhysReg))

    unsigned Reg = MO.getReg();
                 << " as used in instr\n");
    markRegUsedInInstr(Reg);

  for (unsigned i = 0, e = PartialDefs.size(); i != e; ++i)
    markRegUsedInInstr(PartialDefs[i]);
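// AllocateBasicBlock - run the allocator over a single basic block: set up
// PhysRegState from the live-in registers, walk the instructions allocating
// uses before defs, and spill everything that is still live at the end.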
void RAFast::AllocateBasicBlock() {
  DEBUG(dbgs() << "\nAllocating " << *MBB);

  PhysRegState.assign(TRI->getNumRegs(), regDisabled);
  assert(LiveVirtRegs.empty() && "Mapping not cleared from last block?");

    if (MRI->isAllocatable(*I))
      definePhysReg(MII, *I, regReserved);

  while (MII != MBB->end()) {
      dbgs() << "\n>> " << *MI << "Regs:";
      for (unsigned Reg = 1, E = TRI->getNumRegs(); Reg != E; ++Reg) {
        if (PhysRegState[Reg] == regDisabled) continue;
        dbgs() << " " << TRI->getName(Reg);
        switch (PhysRegState[Reg]) {
          LiveRegMap::iterator I = findLiveVirtReg(PhysRegState[Reg]);
          assert(I != LiveVirtRegs.end() && "Missing VirtReg entry");
          assert(I->PhysReg == Reg && "Bad inverse map");

      for (LiveRegMap::iterator i = LiveVirtRegs.begin(),
           e = LiveVirtRegs.end(); i != e; ++i) {
        assert(PhysRegState[i->PhysReg] == i->VirtReg && "Bad inverse map");

      bool ScanDbgValue = true;
      while (ScanDbgValue) {
        ScanDbgValue = false;
          if (!MO.isReg()) continue;
          unsigned Reg = MO.getReg();
          LiveRegMap::iterator LRI = findLiveVirtReg(Reg);
          if (LRI != LiveVirtRegs.end())
            setPhysReg(MI, i, LRI->PhysReg);
            int SS = StackSlotForVirtReg[Reg];
              DEBUG(dbgs() << "Unable to allocate vreg used by DBG_VALUE");
              DEBUG(dbgs() << "Modifying debug info due to spill:"
        LiveDbgValueMap[Reg].push_back(MI);
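      // For a normal instruction the operands are handled in passes: first
      // the virtual register uses are reloaded, then early clobbers and
      // physreg defs are marked, and finally the virtual register defs are
      // assigned, coalescing trivial copies along the way.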
      unsigned CopySrc = 0, CopyDst = 0, CopySrcSub = 0, CopyDstSub = 0;

      unsigned VirtOpEnd = 0;
      bool hasTiedOps = false;
      bool hasEarlyClobbers = false;
      bool hasPartialRedefs = false;
      bool hasPhysDefs = false;
        if (!MO.isReg()) continue;
        unsigned Reg = MO.getReg();
          hasTiedOps = hasTiedOps ||
            hasEarlyClobbers = true;
            hasPartialRedefs = true;
        if (!MRI->isAllocatable(Reg)) continue;
                               regFree : regReserved);
          hasEarlyClobbers = true;

      if (MI->isInlineAsm() || hasEarlyClobbers || hasPartialRedefs ||
          (hasTiedOps && (hasPhysDefs || MCID.getNumDefs() > 1))) {
        handleThroughOperands(MI, VirtDead);
        hasEarlyClobbers = true;

      for (unsigned i = 0; i != VirtOpEnd; ++i) {
        if (!MO.isReg()) continue;
        unsigned Reg = MO.getReg();
        LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, CopyDst);
        unsigned PhysReg = LRI->PhysReg;
        CopySrc = (CopySrc == Reg || CopySrc == PhysReg) ? PhysReg : 0;
        if (setPhysReg(MI, i, PhysReg))

      for (UsedInInstrSet::iterator
           I = UsedInInstr.begin(), E = UsedInInstr.end(); I != E; ++I)
        MRI->setRegUnitUsed(*I);

      if (hasEarlyClobbers) {
        if (!MO.isReg()) continue;
        unsigned Reg = MO.getReg();
        markRegUsedInInstr(Reg);

        DefOpEnd = VirtOpEnd;
        DEBUG(dbgs() << " Spilling remaining registers before call.\n");
        SkippedInstrs.insert(&MCID);
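      // Allocate the virtual register defs last, tracking a possible
      // source/destination pair so identity copies can be coalesced away
      // afterwards.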
      for (unsigned i = 0; i != DefOpEnd; ++i) {
        unsigned Reg = MO.getReg();
          if (!MRI->isAllocatable(Reg)) continue;
                                 regFree : regReserved);
        LiveRegMap::iterator LRI = defineVirtReg(MI, i, Reg, CopySrc);
        unsigned PhysReg = LRI->PhysReg;
        if (setPhysReg(MI, i, PhysReg)) {
        CopyDst = (CopyDst == Reg || CopyDst == PhysReg) ? PhysReg : 0;

      for (unsigned i = 0, e = VirtDead.size(); i != e; ++i)
        killVirtReg(VirtDead[i]);

      for (UsedInInstrSet::iterator
           I = UsedInInstr.begin(), E = UsedInInstr.end(); I != E; ++I)
        MRI->setRegUnitUsed(*I);

      if (CopyDst && CopyDst == CopySrc && CopyDstSub == CopySrcSub) {
        DEBUG(dbgs() << "-- coalescing: " << *MI);

  DEBUG(dbgs() << "Spilling live registers at end of block.\n");

  for (unsigned i = 0, e = Coalesced.size(); i != e; ++i)
    MBB->erase(Coalesced[i]);
  NumCopies += Coalesced.size();
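// Pass entry point: cache the target hooks, size the per-function maps,
// allocate each basic block in turn, and finally drop all virtual-register
// state.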
  DEBUG(dbgs() << "********** FAST REGISTER ALLOCATION **********\n"
               << "********** Function: " << Fn.getName() << '\n');
  MRI = &MF->getRegInfo();
  TRI = TM->getRegisterInfo();
  TII = TM->getInstrInfo();
  MRI->freezeReservedRegs(Fn);
  RegClassInfo.runOnMachineFunction(Fn);
  UsedInInstr.clear();
  UsedInInstr.setUniverse(TRI->getNumRegUnits());

  assert(!MRI->isSSA() && "regalloc requires leaving SSA");

  StackSlotForVirtReg.resize(MRI->getNumVirtRegs());
  LiveVirtRegs.setUniverse(MRI->getNumVirtRegs());

  for (MachineFunction::iterator MBBi = Fn.begin(), MBBe = Fn.end();
       MBBi != MBBe; ++MBBi) {
    AllocateBasicBlock();

       I = SkippedInstrs.begin(), E = SkippedInstrs.end(); I != E; ++I)
    if (const uint16_t *Defs = (*I)->getImplicitDefs())
      while (*Defs)
        MRI->setPhysRegUsed(*Defs++);

  MRI->clearVirtRegs();

  SkippedInstrs.clear();
  StackSlotForVirtReg.clear();
  LiveDbgValueMap.clear();

FunctionPass *llvm::createFastRegisterAllocator() {
  return new RAFast();
}