LLVM API Documentation

RegAllocFast.cpp
1 //===-- RegAllocFast.cpp - A fast register allocator for debug code -------===//
2 //
3 // The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 //
10 // This register allocator allocates registers to a basic block at a time,
11 // attempting to keep values in registers and reusing registers as appropriate.
12 //
13 //===----------------------------------------------------------------------===//
14 
15 #define DEBUG_TYPE "regalloc"
16 #include "llvm/CodeGen/Passes.h"
17 #include "llvm/ADT/DenseMap.h"
18 #include "llvm/ADT/IndexedMap.h"
19 #include "llvm/ADT/STLExtras.h"
20 #include "llvm/ADT/SmallSet.h"
21 #include "llvm/ADT/SmallVector.h"
22 #include "llvm/ADT/SparseSet.h"
23 #include "llvm/ADT/Statistic.h"
31 #include "llvm/IR/BasicBlock.h"
33 #include "llvm/Support/Debug.h"
38 #include <algorithm>
39 using namespace llvm;
40 
41 STATISTIC(NumStores, "Number of stores added");
42 STATISTIC(NumLoads , "Number of loads added");
43 STATISTIC(NumCopies, "Number of copies coalesced");
44 
45 static RegisterRegAlloc
46  fastRegAlloc("fast", "fast register allocator", createFastRegisterAllocator);
47 
48 namespace {
49  class RAFast : public MachineFunctionPass {
50  public:
51  static char ID;
52  RAFast() : MachineFunctionPass(ID), StackSlotForVirtReg(-1),
53  isBulkSpilling(false) {}
54  private:
55  const TargetMachine *TM;
56  MachineFunction *MF;
57  MachineRegisterInfo *MRI;
58  const TargetRegisterInfo *TRI;
59  const TargetInstrInfo *TII;
60  RegisterClassInfo RegClassInfo;
61 
62  // Basic block currently being allocated.
63  MachineBasicBlock *MBB;
64 
65  // StackSlotForVirtReg - Maps virtual regs to the frame index where these
66  // values are spilled.
67  IndexedMap<int, VirtReg2IndexFunctor> StackSlotForVirtReg;
68 
69  // Everything we know about a live virtual register.
70  struct LiveReg {
71  MachineInstr *LastUse; // Last instr to use reg.
72  unsigned VirtReg; // Virtual register number.
73  unsigned PhysReg; // Currently held here.
74  unsigned short LastOpNum; // OpNum on LastUse.
75  bool Dirty; // Register needs spill.
76 
77  explicit LiveReg(unsigned v)
78  : LastUse(0), VirtReg(v), PhysReg(0), LastOpNum(0), Dirty(false) {}
79 
80  unsigned getSparseSetIndex() const {
81  return TargetRegisterInfo::virtReg2Index(VirtReg);
82  }
83  };
84 
85  typedef SparseSet<LiveReg> LiveRegMap;
86 
87  // LiveVirtRegs - This map contains entries for each virtual register
88  // that is currently available in a physical register.
89  LiveRegMap LiveVirtRegs;
90 
91  DenseMap<unsigned, SmallVector<MachineInstr *, 4> > LiveDbgValueMap;
92 
93  // RegState - Track the state of a physical register.
94  enum RegState {
95  // A disabled register is not available for allocation, but an alias may
96  // be in use. A register can only be moved out of the disabled state if
97  // all aliases are disabled.
98  regDisabled,
99 
100  // A free register is not currently in use and can be allocated
101  // immediately without checking aliases.
102  regFree,
103 
104  // A reserved register has been assigned explicitly (e.g., setting up a
105  // call parameter), and it remains reserved until it is used.
106  regReserved
107 
108  // A register state may also be a virtual register number, indicating that
109  // the physical register is currently allocated to a virtual register. In
110  // that case, LiveVirtRegs contains the inverse mapping.
111  };
112 
113  // PhysRegState - One of the RegState enums, or a virtreg.
114  std::vector<unsigned> PhysRegState;
115 
116  // Set of register units.
117  typedef SparseSet<unsigned> UsedInInstrSet;
118 
119  // Set of register units that are used in the current instruction, and so
120  // cannot be allocated.
121  UsedInInstrSet UsedInInstr;
122 
123  // Mark a physreg as used in this instruction.
124  void markRegUsedInInstr(unsigned PhysReg) {
125  for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units)
126  UsedInInstr.insert(*Units);
127  }
128 
129  // Check if a physreg or any of its aliases are used in this instruction.
130  bool isRegUsedInInstr(unsigned PhysReg) const {
131  for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units)
132  if (UsedInInstr.count(*Units))
133  return true;
134  return false;
135  }
136 
137  // SkippedInstrs - Descriptors of instructions whose clobber list was
138  // ignored because all registers were spilled. It is still necessary to
139  // mark all the clobbered registers as used by the function.
140  SmallPtrSet<const MCInstrDesc*, 4> SkippedInstrs;
141 
142  // isBulkSpilling - This flag is set when LiveRegMap will be cleared
143  // completely after spilling all live registers. LiveRegMap entries should
144  // not be erased.
145  bool isBulkSpilling;
146 
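  // Relative costs used by calcSpillCost() and allocVirtReg() when choosing a
  // physreg to evict: a clean register only needs its mapping dropped, a dirty
  // register needs a store to its stack slot first, and spillImpossible marks
  // registers that cannot be freed for the current instruction.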
147  enum LLVM_ENUM_INT_TYPE(unsigned) {
148  spillClean = 1,
149  spillDirty = 100,
150  spillImpossible = ~0u
151  };
152  public:
153  virtual const char *getPassName() const {
154  return "Fast Register Allocator";
155  }
156 
157  virtual void getAnalysisUsage(AnalysisUsage &AU) const {
158  AU.setPreservesCFG();
159  MachineFunctionPass::getAnalysisUsage(AU);
160  }
161 
162  private:
163  bool runOnMachineFunction(MachineFunction &Fn);
164  void AllocateBasicBlock();
165  void handleThroughOperands(MachineInstr *MI,
166  SmallVectorImpl<unsigned> &VirtDead);
167  int getStackSpaceFor(unsigned VirtReg, const TargetRegisterClass *RC);
168  bool isLastUseOfLocalReg(MachineOperand&);
169 
170  void addKillFlag(const LiveReg&);
171  void killVirtReg(LiveRegMap::iterator);
172  void killVirtReg(unsigned VirtReg);
173  void spillVirtReg(MachineBasicBlock::iterator MI, LiveRegMap::iterator);
174  void spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg);
175 
176  void usePhysReg(MachineOperand&);
177  void definePhysReg(MachineInstr *MI, unsigned PhysReg, RegState NewState);
178  unsigned calcSpillCost(unsigned PhysReg) const;
179  void assignVirtToPhysReg(LiveReg&, unsigned PhysReg);
180  LiveRegMap::iterator findLiveVirtReg(unsigned VirtReg) {
181  return LiveVirtRegs.find(TargetRegisterInfo::virtReg2Index(VirtReg));
182  }
183  LiveRegMap::const_iterator findLiveVirtReg(unsigned VirtReg) const {
184  return LiveVirtRegs.find(TargetRegisterInfo::virtReg2Index(VirtReg));
185  }
186  LiveRegMap::iterator assignVirtToPhysReg(unsigned VReg, unsigned PhysReg);
187  LiveRegMap::iterator allocVirtReg(MachineInstr *MI, LiveRegMap::iterator,
188  unsigned Hint);
189  LiveRegMap::iterator defineVirtReg(MachineInstr *MI, unsigned OpNum,
190  unsigned VirtReg, unsigned Hint);
191  LiveRegMap::iterator reloadVirtReg(MachineInstr *MI, unsigned OpNum,
192  unsigned VirtReg, unsigned Hint);
193  void spillAll(MachineBasicBlock::iterator MI);
194  bool setPhysReg(MachineInstr *MI, unsigned OpNum, unsigned PhysReg);
195  };
196  char RAFast::ID = 0;
197 }
198 
199 /// getStackSpaceFor - This allocates space for the specified virtual register
200 /// to be held on the stack.
201 int RAFast::getStackSpaceFor(unsigned VirtReg, const TargetRegisterClass *RC) {
202  // Find the location Reg would belong...
203  int SS = StackSlotForVirtReg[VirtReg];
204  if (SS != -1)
205  return SS; // Already has space allocated?
206 
207  // Allocate a new stack object for this spill location...
208  int FrameIdx = MF->getFrameInfo()->CreateSpillStackObject(RC->getSize(),
209  RC->getAlignment());
210 
211  // Assign the slot.
212  StackSlotForVirtReg[VirtReg] = FrameIdx;
213  return FrameIdx;
214 }
215 
216 /// isLastUseOfLocalReg - Return true if MO is the only remaining reference to
217 /// its virtual register, and it is guaranteed to be a block-local register.
218 ///
219 bool RAFast::isLastUseOfLocalReg(MachineOperand &MO) {
220  // If the register has ever been spilled or reloaded, we conservatively assume
221  // it is a global register used in multiple blocks.
222  if (StackSlotForVirtReg[MO.getReg()] != -1)
223  return false;
224 
225  // Check that the use/def chain has exactly one operand - MO.
226  MachineRegisterInfo::reg_nodbg_iterator I = MRI->reg_nodbg_begin(MO.getReg());
227  if (&I.getOperand() != &MO)
228  return false;
229  return ++I == MRI->reg_nodbg_end();
230 }
231 
232 /// addKillFlag - Set kill flags on last use of a virtual register.
233 void RAFast::addKillFlag(const LiveReg &LR) {
234  if (!LR.LastUse) return;
235  MachineOperand &MO = LR.LastUse->getOperand(LR.LastOpNum);
236  if (MO.isUse() && !LR.LastUse->isRegTiedToDefOperand(LR.LastOpNum)) {
237  if (MO.getReg() == LR.PhysReg)
238  MO.setIsKill();
239  else
240  LR.LastUse->addRegisterKilled(LR.PhysReg, TRI, true);
241  }
242 }
243 
244 /// killVirtReg - Mark virtreg as no longer available.
245 void RAFast::killVirtReg(LiveRegMap::iterator LRI) {
246  addKillFlag(*LRI);
247  assert(PhysRegState[LRI->PhysReg] == LRI->VirtReg &&
248  "Broken RegState mapping");
249  PhysRegState[LRI->PhysReg] = regFree;
250  // Erase from LiveVirtRegs unless we're spilling in bulk.
251  if (!isBulkSpilling)
252  LiveVirtRegs.erase(LRI);
253 }
254 
255 /// killVirtReg - Mark virtreg as no longer available.
256 void RAFast::killVirtReg(unsigned VirtReg) {
257  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
258  "killVirtReg needs a virtual register");
259  LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
260  if (LRI != LiveVirtRegs.end())
261  killVirtReg(LRI);
262 }
263 
264 /// spillVirtReg - This method spills the value specified by VirtReg into the
265 /// corresponding stack slot if needed.
266 void RAFast::spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg) {
267  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
268  "Spilling a physical register is illegal!");
269  LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
270  assert(LRI != LiveVirtRegs.end() && "Spilling unmapped virtual register");
271  spillVirtReg(MI, LRI);
272 }
273 
274 /// spillVirtReg - Do the actual work of spilling.
275 void RAFast::spillVirtReg(MachineBasicBlock::iterator MI,
276  LiveRegMap::iterator LRI) {
277  LiveReg &LR = *LRI;
278  assert(PhysRegState[LR.PhysReg] == LRI->VirtReg && "Broken RegState mapping");
279 
280  if (LR.Dirty) {
281  // If this physreg is used by the instruction, we want to kill it on the
282  // instruction, not on the spill.
283  bool SpillKill = LR.LastUse != MI;
284  LR.Dirty = false;
285  DEBUG(dbgs() << "Spilling " << PrintReg(LRI->VirtReg, TRI)
286  << " in " << PrintReg(LR.PhysReg, TRI));
287  const TargetRegisterClass *RC = MRI->getRegClass(LRI->VirtReg);
288  int FI = getStackSpaceFor(LRI->VirtReg, RC);
289  DEBUG(dbgs() << " to stack slot #" << FI << "\n");
290  TII->storeRegToStackSlot(*MBB, MI, LR.PhysReg, SpillKill, FI, RC, TRI);
291  ++NumStores; // Update statistics
292 
293  // If this register is used by DBG_VALUE then insert a new DBG_VALUE to
294  // identify the spilled location as the place to find the corresponding
295  // variable's value.
296  SmallVectorImpl<MachineInstr *> &LRIDbgValues =
297  LiveDbgValueMap[LRI->VirtReg];
298  for (unsigned li = 0, le = LRIDbgValues.size(); li != le; ++li) {
299  MachineInstr *DBG = LRIDbgValues[li];
300  const MDNode *MDPtr = DBG->getOperand(2).getMetadata();
301  bool IsIndirect = DBG->isIndirectDebugValue();
302  uint64_t Offset = IsIndirect ? DBG->getOperand(1).getImm() : 0;
303  DebugLoc DL;
304  if (MI == MBB->end()) {
305  // If MI is at basic block end then use last instruction's location.
306  MachineBasicBlock::iterator EI = MI;
307  DL = (--EI)->getDebugLoc();
308  } else
309  DL = MI->getDebugLoc();
310  MachineBasicBlock *MBB = DBG->getParent();
311  MachineInstr *NewDV =
312  BuildMI(*MBB, MI, DL, TII->get(TargetOpcode::DBG_VALUE))
313  .addFrameIndex(FI).addImm(Offset).addMetadata(MDPtr);
314  (void)NewDV;
315  DEBUG(dbgs() << "Inserting debug info due to spill:" << "\n" << *NewDV);
316  }
317  // Now that this register is spilled, there should not be any DBG_VALUE
318  // pointing to this register because they all point to the spilled value
319  // now.
320  LRIDbgValues.clear();
321  if (SpillKill)
322  LR.LastUse = 0; // Don't kill register again
323  }
324  killVirtReg(LRI);
325 }
326 
327 /// spillAll - Spill all dirty virtregs without killing them.
328 void RAFast::spillAll(MachineBasicBlock::iterator MI) {
329  if (LiveVirtRegs.empty()) return;
330  isBulkSpilling = true;
331  // The LiveRegMap is keyed by an unsigned (the virtreg number), so the order
332  // of spilling here is deterministic, if arbitrary.
333  for (LiveRegMap::iterator i = LiveVirtRegs.begin(), e = LiveVirtRegs.end();
334  i != e; ++i)
335  spillVirtReg(MI, i);
336  LiveVirtRegs.clear();
337  isBulkSpilling = false;
338 }
339 
340 /// usePhysReg - Handle the direct use of a physical register.
341 /// Check that the register is not used by a virtreg.
342 /// Kill the physreg, marking it free.
343 /// This may add implicit kills to MO->getParent() and invalidate MO.
344 void RAFast::usePhysReg(MachineOperand &MO) {
345  unsigned PhysReg = MO.getReg();
346  assert(TargetRegisterInfo::isPhysicalRegister(PhysReg) &&
347  "Bad usePhysReg operand");
348  markRegUsedInInstr(PhysReg);
349  switch (PhysRegState[PhysReg]) {
350  case regDisabled:
351  break;
352  case regReserved:
353  PhysRegState[PhysReg] = regFree;
354  // Fall through
355  case regFree:
356  MO.setIsKill();
357  return;
358  default:
359  // The physreg was allocated to a virtual register. That means the value we
360  // wanted has been clobbered.
361  llvm_unreachable("Instruction uses an allocated register");
362  }
363 
364  // Maybe a superregister is reserved?
365  for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) {
366  unsigned Alias = *AI;
367  switch (PhysRegState[Alias]) {
368  case regDisabled:
369  break;
370  case regReserved:
371  assert(TRI->isSuperRegister(PhysReg, Alias) &&
372  "Instruction is not using a subregister of a reserved register");
373  // Leave the superregister in the working set.
374  PhysRegState[Alias] = regFree;
375  MO.getParent()->addRegisterKilled(Alias, TRI, true);
376  return;
377  case regFree:
378  if (TRI->isSuperRegister(PhysReg, Alias)) {
379  // Leave the superregister in the working set.
380  MO.getParent()->addRegisterKilled(Alias, TRI, true);
381  return;
382  }
383  // Some other alias was in the working set - clear it.
384  PhysRegState[Alias] = regDisabled;
385  break;
386  default:
387  llvm_unreachable("Instruction uses an alias of an allocated register");
388  }
389  }
390 
391  // All aliases are disabled, bring register into working set.
392  PhysRegState[PhysReg] = regFree;
393  MO.setIsKill();
394 }
395 
396 /// definePhysReg - Mark PhysReg as reserved or free after spilling any
397 /// virtregs. This is very similar to defineVirtReg except the physreg is
398 /// reserved instead of allocated.
399 void RAFast::definePhysReg(MachineInstr *MI, unsigned PhysReg,
400  RegState NewState) {
401  markRegUsedInInstr(PhysReg);
402  switch (unsigned VirtReg = PhysRegState[PhysReg]) {
403  case regDisabled:
404  break;
405  default:
406  spillVirtReg(MI, VirtReg);
407  // Fall through.
408  case regFree:
409  case regReserved:
410  PhysRegState[PhysReg] = NewState;
411  return;
412  }
413 
414  // This is a disabled register, disable all aliases.
415  PhysRegState[PhysReg] = NewState;
416  for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) {
417  unsigned Alias = *AI;
418  switch (unsigned VirtReg = PhysRegState[Alias]) {
419  case regDisabled:
420  break;
421  default:
422  spillVirtReg(MI, VirtReg);
423  // Fall through.
424  case regFree:
425  case regReserved:
426  PhysRegState[Alias] = regDisabled;
427  if (TRI->isSuperRegister(PhysReg, Alias))
428  return;
429  break;
430  }
431  }
432 }
433 
434 
435 // calcSpillCost - Return the cost of spilling and clearing out PhysReg and
436 // its aliases so that it is free for allocation.
437 // Returns 0 when PhysReg is free or disabled with all aliases disabled - it
438 // can be allocated directly.
439 // Returns spillImpossible when PhysReg or an alias can't be spilled.
440 unsigned RAFast::calcSpillCost(unsigned PhysReg) const {
441  if (isRegUsedInInstr(PhysReg)) {
442  DEBUG(dbgs() << PrintReg(PhysReg, TRI) << " is already used in instr.\n");
443  return spillImpossible;
444  }
445  switch (unsigned VirtReg = PhysRegState[PhysReg]) {
446  case regDisabled:
447  break;
448  case regFree:
449  return 0;
450  case regReserved:
451  DEBUG(dbgs() << PrintReg(VirtReg, TRI) << " corresponding "
452  << PrintReg(PhysReg, TRI) << " is reserved already.\n");
453  return spillImpossible;
454  default: {
455  LiveRegMap::const_iterator I = findLiveVirtReg(VirtReg);
456  assert(I != LiveVirtRegs.end() && "Missing VirtReg entry");
457  return I->Dirty ? spillDirty : spillClean;
458  }
459  }
460 
461  // This is a disabled register, add up cost of aliases.
462  DEBUG(dbgs() << PrintReg(PhysReg, TRI) << " is disabled.\n");
463  unsigned Cost = 0;
464  for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) {
465  unsigned Alias = *AI;
466  switch (unsigned VirtReg = PhysRegState[Alias]) {
467  case regDisabled:
468  break;
469  case regFree:
470  ++Cost;
471  break;
472  case regReserved:
473  return spillImpossible;
474  default: {
475  LiveRegMap::const_iterator I = findLiveVirtReg(VirtReg);
476  assert(I != LiveVirtRegs.end() && "Missing VirtReg entry");
477  Cost += I->Dirty ? spillDirty : spillClean;
478  break;
479  }
480  }
481  }
482  return Cost;
483 }
484 
485 
486 /// assignVirtToPhysReg - This method updates local state so that we know
487 /// that PhysReg is the proper container for VirtReg now. The physical
488 /// register must not be used for anything else when this is called.
489 ///
490 void RAFast::assignVirtToPhysReg(LiveReg &LR, unsigned PhysReg) {
491  DEBUG(dbgs() << "Assigning " << PrintReg(LR.VirtReg, TRI) << " to "
492  << PrintReg(PhysReg, TRI) << "\n");
493  PhysRegState[PhysReg] = LR.VirtReg;
494  assert(!LR.PhysReg && "Already assigned a physreg");
495  LR.PhysReg = PhysReg;
496 }
497 
498 RAFast::LiveRegMap::iterator
499 RAFast::assignVirtToPhysReg(unsigned VirtReg, unsigned PhysReg) {
500  LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
501  assert(LRI != LiveVirtRegs.end() && "VirtReg disappeared");
502  assignVirtToPhysReg(*LRI, PhysReg);
503  return LRI;
504 }
505 
506 /// allocVirtReg - Allocate a physical register for VirtReg.
507 RAFast::LiveRegMap::iterator RAFast::allocVirtReg(MachineInstr *MI,
508  LiveRegMap::iterator LRI,
509  unsigned Hint) {
510  const unsigned VirtReg = LRI->VirtReg;
511 
512  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
513  "Can only allocate virtual registers");
514 
515  const TargetRegisterClass *RC = MRI->getRegClass(VirtReg);
516 
517  // Ignore invalid hints.
518  if (Hint && (!TargetRegisterInfo::isPhysicalRegister(Hint) ||
519  !RC->contains(Hint) || !MRI->isAllocatable(Hint)))
520  Hint = 0;
521 
522  // Take hint when possible.
523  if (Hint) {
524  // Ignore the hint if we would have to spill a dirty register.
525  unsigned Cost = calcSpillCost(Hint);
526  if (Cost < spillDirty) {
527  if (Cost)
528  definePhysReg(MI, Hint, regFree);
529  // definePhysReg may kill virtual registers and modify LiveVirtRegs.
530  // That invalidates LRI, so run a new lookup for VirtReg.
531  return assignVirtToPhysReg(VirtReg, Hint);
532  }
533  }
534 
535  ArrayRef<MCPhysReg> AO = RegClassInfo.getOrder(RC);
536 
537  // First try to find a completely free register.
538  for (ArrayRef<MCPhysReg>::iterator I = AO.begin(), E = AO.end(); I != E; ++I){
539  unsigned PhysReg = *I;
540  if (PhysRegState[PhysReg] == regFree && !isRegUsedInInstr(PhysReg)) {
541  assignVirtToPhysReg(*LRI, PhysReg);
542  return LRI;
543  }
544  }
545 
546  DEBUG(dbgs() << "Allocating " << PrintReg(VirtReg) << " from "
547  << RC->getName() << "\n");
548 
549  unsigned BestReg = 0, BestCost = spillImpossible;
550  for (ArrayRef<MCPhysReg>::iterator I = AO.begin(), E = AO.end(); I != E; ++I){
551  unsigned Cost = calcSpillCost(*I);
552  DEBUG(dbgs() << "\tRegister: " << PrintReg(*I, TRI) << "\n");
553  DEBUG(dbgs() << "\tCost: " << Cost << "\n");
554  DEBUG(dbgs() << "\tBestCost: " << BestCost << "\n");
555  // Cost is 0 when all aliases are already disabled.
556  if (Cost == 0) {
557  assignVirtToPhysReg(*LRI, *I);
558  return LRI;
559  }
560  if (Cost < BestCost)
561  BestReg = *I, BestCost = Cost;
562  }
563 
564  if (BestReg) {
565  definePhysReg(MI, BestReg, regFree);
566  // definePhysReg may kill virtual registers and modify LiveVirtRegs.
567  // That invalidates LRI, so run a new lookup for VirtReg.
568  return assignVirtToPhysReg(VirtReg, BestReg);
569  }
570 
571  // Nothing we can do. Report an error and keep going with a bad allocation.
572  if (MI->isInlineAsm())
573  MI->emitError("inline assembly requires more registers than available");
574  else
575  MI->emitError("ran out of registers during register allocation");
576  definePhysReg(MI, *AO.begin(), regFree);
577  return assignVirtToPhysReg(VirtReg, *AO.begin());
578 }
579 
580 /// defineVirtReg - Allocate a register for VirtReg and mark it as dirty.
581 RAFast::LiveRegMap::iterator
582 RAFast::defineVirtReg(MachineInstr *MI, unsigned OpNum,
583  unsigned VirtReg, unsigned Hint) {
584  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
585  "Not a virtual register");
586  LiveRegMap::iterator LRI;
587  bool New;
588  tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
589  if (New) {
590  // If there is no hint, peek at the only use of this register.
591  if ((!Hint || !TargetRegisterInfo::isPhysicalRegister(Hint)) &&
592  MRI->hasOneNonDBGUse(VirtReg)) {
593  const MachineInstr &UseMI = *MRI->use_nodbg_begin(VirtReg);
594  // It's a copy, use the destination register as a hint.
595  if (UseMI.isCopyLike())
596  Hint = UseMI.getOperand(0).getReg();
597  }
598  LRI = allocVirtReg(MI, LRI, Hint);
599  } else if (LRI->LastUse) {
600  // Redefining a live register - kill at the last use, unless it is this
601  // instruction defining VirtReg multiple times.
602  if (LRI->LastUse != MI || LRI->LastUse->getOperand(LRI->LastOpNum).isUse())
603  addKillFlag(*LRI);
604  }
605  assert(LRI->PhysReg && "Register not assigned");
606  LRI->LastUse = MI;
607  LRI->LastOpNum = OpNum;
608  LRI->Dirty = true;
609  markRegUsedInInstr(LRI->PhysReg);
610  return LRI;
611 }
612 
613 /// reloadVirtReg - Make sure VirtReg is available in a physreg and return it.
614 RAFast::LiveRegMap::iterator
615 RAFast::reloadVirtReg(MachineInstr *MI, unsigned OpNum,
616  unsigned VirtReg, unsigned Hint) {
617  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
618  "Not a virtual register");
619  LiveRegMap::iterator LRI;
620  bool New;
621  tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
622  MachineOperand &MO = MI->getOperand(OpNum);
623  if (New) {
624  LRI = allocVirtReg(MI, LRI, Hint);
625  const TargetRegisterClass *RC = MRI->getRegClass(VirtReg);
626  int FrameIndex = getStackSpaceFor(VirtReg, RC);
627  DEBUG(dbgs() << "Reloading " << PrintReg(VirtReg, TRI) << " into "
628  << PrintReg(LRI->PhysReg, TRI) << "\n");
629  TII->loadRegFromStackSlot(*MBB, MI, LRI->PhysReg, FrameIndex, RC, TRI);
630  ++NumLoads;
631  } else if (LRI->Dirty) {
632  if (isLastUseOfLocalReg(MO)) {
633  DEBUG(dbgs() << "Killing last use: " << MO << "\n");
634  if (MO.isUse())
635  MO.setIsKill();
636  else
637  MO.setIsDead();
638  } else if (MO.isKill()) {
639  DEBUG(dbgs() << "Clearing dubious kill: " << MO << "\n");
640  MO.setIsKill(false);
641  } else if (MO.isDead()) {
642  DEBUG(dbgs() << "Clearing dubious dead: " << MO << "\n");
643  MO.setIsDead(false);
644  }
645  } else if (MO.isKill()) {
646  // We must remove kill flags from uses of reloaded registers because the
647  // register would be killed immediately, and there might be a second use:
648  // %foo = OR %x<kill>, %x
649  // This would cause a second reload of %x into a different register.
650  DEBUG(dbgs() << "Clearing clean kill: " << MO << "\n");
651  MO.setIsKill(false);
652  } else if (MO.isDead()) {
653  DEBUG(dbgs() << "Clearing clean dead: " << MO << "\n");
654  MO.setIsDead(false);
655  }
656  assert(LRI->PhysReg && "Register not assigned");
657  LRI->LastUse = MI;
658  LRI->LastOpNum = OpNum;
659  markRegUsedInInstr(LRI->PhysReg);
660  return LRI;
661 }
662 
663 // setPhysReg - Change operand OpNum in MI to refer to PhysReg, considering
664 // subregs. This may invalidate any operand pointers.
665 // Return true if the operand kills its register.
666 bool RAFast::setPhysReg(MachineInstr *MI, unsigned OpNum, unsigned PhysReg) {
667  MachineOperand &MO = MI->getOperand(OpNum);
668  bool Dead = MO.isDead();
669  if (!MO.getSubReg()) {
670  MO.setReg(PhysReg);
671  return MO.isKill() || Dead;
672  }
673 
674  // Handle subregister index.
675  MO.setReg(PhysReg ? TRI->getSubReg(PhysReg, MO.getSubReg()) : 0);
676  MO.setSubReg(0);
677 
678  // A kill flag implies killing the full register. Add corresponding super
679  // register kill.
680  if (MO.isKill()) {
681  MI->addRegisterKilled(PhysReg, TRI, true);
682  return true;
683  }
684 
685  // A <def,read-undef> of a sub-register requires an implicit def of the full
686  // register.
687  if (MO.isDef() && MO.isUndef())
688  MI->addRegisterDefined(PhysReg, TRI);
689 
690  return Dead;
691 }
692 
693 // Handle special instruction operand like early clobbers and tied ops when
694 // there are additional physreg defines.
695 void RAFast::handleThroughOperands(MachineInstr *MI,
696  SmallVectorImpl<unsigned> &VirtDead) {
697  DEBUG(dbgs() << "Scanning for through registers:");
698  SmallSet<unsigned, 8> ThroughRegs;
699  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
700  MachineOperand &MO = MI->getOperand(i);
701  if (!MO.isReg()) continue;
702  unsigned Reg = MO.getReg();
703  if (!TargetRegisterInfo::isVirtualRegister(Reg))
704  continue;
705  if (MO.isEarlyClobber() || MI->isRegTiedToDefOperand(i) ||
706  (MO.getSubReg() && MI->readsVirtualRegister(Reg))) {
707  if (ThroughRegs.insert(Reg))
708  DEBUG(dbgs() << ' ' << PrintReg(Reg));
709  }
710  }
711 
712  // If any physreg defines collide with preallocated through registers,
713  // we must spill and reallocate.
714  DEBUG(dbgs() << "\nChecking for physdef collisions.\n");
715  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
716  MachineOperand &MO = MI->getOperand(i);
717  if (!MO.isReg() || !MO.isDef()) continue;
718  unsigned Reg = MO.getReg();
719  if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
720  markRegUsedInInstr(Reg);
721  for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI) {
722  if (ThroughRegs.count(PhysRegState[*AI]))
723  definePhysReg(MI, *AI, regFree);
724  }
725  }
726 
727  SmallVector<unsigned, 8> PartialDefs;
728  DEBUG(dbgs() << "Allocating tied uses.\n");
729  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
730  MachineOperand &MO = MI->getOperand(i);
731  if (!MO.isReg()) continue;
732  unsigned Reg = MO.getReg();
733  if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
734  if (MO.isUse()) {
735  unsigned DefIdx = 0;
736  if (!MI->isRegTiedToDefOperand(i, &DefIdx)) continue;
737  DEBUG(dbgs() << "Operand " << i << "("<< MO << ") is tied to operand "
738  << DefIdx << ".\n");
739  LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, 0);
740  unsigned PhysReg = LRI->PhysReg;
741  setPhysReg(MI, i, PhysReg);
742  // Note: we don't update the def operand yet. That would cause the normal
743  // def-scan to attempt spilling.
744  } else if (MO.getSubReg() && MI->readsVirtualRegister(Reg)) {
745  DEBUG(dbgs() << "Partial redefine: " << MO << "\n");
746  // Reload the register, but don't assign to the operand just yet.
747  // That would confuse the later phys-def processing pass.
748  LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, 0);
749  PartialDefs.push_back(LRI->PhysReg);
750  }
751  }
752 
753  DEBUG(dbgs() << "Allocating early clobbers.\n");
754  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
755  MachineOperand &MO = MI->getOperand(i);
756  if (!MO.isReg()) continue;
757  unsigned Reg = MO.getReg();
758  if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
759  if (!MO.isEarlyClobber())
760  continue;
761  // Note: defineVirtReg may invalidate MO.
762  LiveRegMap::iterator LRI = defineVirtReg(MI, i, Reg, 0);
763  unsigned PhysReg = LRI->PhysReg;
764  if (setPhysReg(MI, i, PhysReg))
765  VirtDead.push_back(Reg);
766  }
767 
768  // Restore UsedInInstr to a state usable for allocating normal virtual uses.
769  UsedInInstr.clear();
770  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
771  MachineOperand &MO = MI->getOperand(i);
772  if (!MO.isReg() || (MO.isDef() && !MO.isEarlyClobber())) continue;
773  unsigned Reg = MO.getReg();
774  if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
775  DEBUG(dbgs() << "\tSetting " << PrintReg(Reg, TRI)
776  << " as used in instr\n");
777  markRegUsedInInstr(Reg);
778  }
779 
780  // Also mark PartialDefs as used to avoid reallocation.
781  for (unsigned i = 0, e = PartialDefs.size(); i != e; ++i)
782  markRegUsedInInstr(PartialDefs[i]);
783 }
784 
785 void RAFast::AllocateBasicBlock() {
786  DEBUG(dbgs() << "\nAllocating " << *MBB);
787 
788  PhysRegState.assign(TRI->getNumRegs(), regDisabled);
789  assert(LiveVirtRegs.empty() && "Mapping not cleared from last block?");
790 
791  MachineBasicBlock::iterator MII = MBB->begin();
792 
793  // Add live-in registers as live.
794  for (MachineBasicBlock::livein_iterator I = MBB->livein_begin(),
795  E = MBB->livein_end(); I != E; ++I)
796  if (MRI->isAllocatable(*I))
797  definePhysReg(MII, *I, regReserved);
798 
799  SmallVector<unsigned, 8> VirtDead;
800  SmallVector<MachineInstr*, 32> Coalesced;
801 
802  // Otherwise, sequentially allocate each instruction in the MBB.
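  // Each instruction is handled in three scans: the first marks physreg uses
  // and early clobbers, the second allocates and reloads virtual register
  // uses, and the third allocates defs and collects dead defs.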
803  while (MII != MBB->end()) {
804  MachineInstr *MI = MII++;
805  const MCInstrDesc &MCID = MI->getDesc();
806  DEBUG({
807  dbgs() << "\n>> " << *MI << "Regs:";
808  for (unsigned Reg = 1, E = TRI->getNumRegs(); Reg != E; ++Reg) {
809  if (PhysRegState[Reg] == regDisabled) continue;
810  dbgs() << " " << TRI->getName(Reg);
811  switch(PhysRegState[Reg]) {
812  case regFree:
813  break;
814  case regReserved:
815  dbgs() << "*";
816  break;
817  default: {
818  dbgs() << '=' << PrintReg(PhysRegState[Reg]);
819  LiveRegMap::iterator I = findLiveVirtReg(PhysRegState[Reg]);
820  assert(I != LiveVirtRegs.end() && "Missing VirtReg entry");
821  if (I->Dirty)
822  dbgs() << "*";
823  assert(I->PhysReg == Reg && "Bad inverse map");
824  break;
825  }
826  }
827  }
828  dbgs() << '\n';
829  // Check that LiveVirtRegs is the inverse.
830  for (LiveRegMap::iterator i = LiveVirtRegs.begin(),
831  e = LiveVirtRegs.end(); i != e; ++i) {
832  assert(TargetRegisterInfo::isVirtualRegister(i->VirtReg) &&
833  "Bad map key");
834  assert(TargetRegisterInfo::isPhysicalRegister(i->PhysReg) &&
835  "Bad map value");
836  assert(PhysRegState[i->PhysReg] == i->VirtReg && "Bad inverse map");
837  }
838  });
839 
840  // Debug values are not allowed to change codegen in any way.
841  if (MI->isDebugValue()) {
842  bool ScanDbgValue = true;
843  while (ScanDbgValue) {
844  ScanDbgValue = false;
845  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
846  MachineOperand &MO = MI->getOperand(i);
847  if (!MO.isReg()) continue;
848  unsigned Reg = MO.getReg();
849  if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
850  LiveRegMap::iterator LRI = findLiveVirtReg(Reg);
851  if (LRI != LiveVirtRegs.end())
852  setPhysReg(MI, i, LRI->PhysReg);
853  else {
854  int SS = StackSlotForVirtReg[Reg];
855  if (SS == -1) {
856  // We can't allocate a physreg for a DebugValue, sorry!
857  DEBUG(dbgs() << "Unable to allocate vreg used by DBG_VALUE");
858  MO.setReg(0);
859  }
860  else {
861  // Modify DBG_VALUE now that the value is in a spill slot.
862  bool IsIndirect = MI->isIndirectDebugValue();
863  uint64_t Offset = IsIndirect ? MI->getOperand(1).getImm() : 0;
864  const MDNode *MDPtr =
865  MI->getOperand(MI->getNumOperands()-1).getMetadata();
866  DebugLoc DL = MI->getDebugLoc();
867  MachineBasicBlock *MBB = MI->getParent();
868  MachineInstr *NewDV = BuildMI(*MBB, MBB->erase(MI), DL,
869  TII->get(TargetOpcode::DBG_VALUE))
870  .addFrameIndex(SS).addImm(Offset).addMetadata(MDPtr);
871  DEBUG(dbgs() << "Modifying debug info due to spill:"
872  << "\t" << *NewDV);
873  // Scan NewDV operands from the beginning.
874  MI = NewDV;
875  ScanDbgValue = true;
876  break;
877  }
878  }
879  LiveDbgValueMap[Reg].push_back(MI);
880  }
881  }
882  // Next instruction.
883  continue;
884  }
885 
886  // If this is a copy, we may be able to coalesce.
887  unsigned CopySrc = 0, CopyDst = 0, CopySrcSub = 0, CopyDstSub = 0;
888  if (MI->isCopy()) {
889  CopyDst = MI->getOperand(0).getReg();
890  CopySrc = MI->getOperand(1).getReg();
891  CopyDstSub = MI->getOperand(0).getSubReg();
892  CopySrcSub = MI->getOperand(1).getSubReg();
893  }
894 
895  // Track registers used by instruction.
896  UsedInInstr.clear();
897 
898  // First scan.
899  // Mark physreg uses and early clobbers as used.
900  // Find the end of the virtreg operands
901  unsigned VirtOpEnd = 0;
902  bool hasTiedOps = false;
903  bool hasEarlyClobbers = false;
904  bool hasPartialRedefs = false;
905  bool hasPhysDefs = false;
906  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
907  MachineOperand &MO = MI->getOperand(i);
908  // Make sure MRI knows about registers clobbered by regmasks.
909  if (MO.isRegMask()) {
910  MRI->addPhysRegsUsedFromRegMask(MO.getRegMask());
911  continue;
912  }
913  if (!MO.isReg()) continue;
914  unsigned Reg = MO.getReg();
915  if (!Reg) continue;
916  if (TargetRegisterInfo::isVirtualRegister(Reg)) {
917  VirtOpEnd = i+1;
918  if (MO.isUse()) {
919  hasTiedOps = hasTiedOps ||
920  MCID.getOperandConstraint(i, MCOI::TIED_TO) != -1;
921  } else {
922  if (MO.isEarlyClobber())
923  hasEarlyClobbers = true;
924  if (MO.getSubReg() && MI->readsVirtualRegister(Reg))
925  hasPartialRedefs = true;
926  }
927  continue;
928  }
929  if (!MRI->isAllocatable(Reg)) continue;
930  if (MO.isUse()) {
931  usePhysReg(MO);
932  } else if (MO.isEarlyClobber()) {
933  definePhysReg(MI, Reg, (MO.isImplicit() || MO.isDead()) ?
934  regFree : regReserved);
935  hasEarlyClobbers = true;
936  } else
937  hasPhysDefs = true;
938  }
939 
940  // The instruction may have virtual register operands that must be allocated
941  // the same register at use-time and def-time: early clobbers and tied
942  // operands. If there are also physical defs, these registers must avoid
943  // both physical defs and uses, making them more constrained than normal
944  // operands.
945  // Similarly, if there are multiple defs and tied operands, we must make
946  // sure the same register is allocated to uses and defs.
947  // We didn't detect inline asm tied operands above, so just make this extra
948  // pass for all inline asm.
949  if (MI->isInlineAsm() || hasEarlyClobbers || hasPartialRedefs ||
950  (hasTiedOps && (hasPhysDefs || MCID.getNumDefs() > 1))) {
951  handleThroughOperands(MI, VirtDead);
952  // Don't attempt coalescing when we have funny stuff going on.
953  CopyDst = 0;
954  // Pretend we have early clobbers so the use operands get marked below.
955  // This is not necessary for the common case of a single tied use.
956  hasEarlyClobbers = true;
957  }
958 
959  // Second scan.
960  // Allocate virtreg uses.
961  for (unsigned i = 0; i != VirtOpEnd; ++i) {
962  MachineOperand &MO = MI->getOperand(i);
963  if (!MO.isReg()) continue;
964  unsigned Reg = MO.getReg();
965  if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
966  if (MO.isUse()) {
967  LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, CopyDst);
968  unsigned PhysReg = LRI->PhysReg;
969  CopySrc = (CopySrc == Reg || CopySrc == PhysReg) ? PhysReg : 0;
970  if (setPhysReg(MI, i, PhysReg))
971  killVirtReg(LRI);
972  }
973  }
974 
975  for (UsedInInstrSet::iterator
976  I = UsedInInstr.begin(), E = UsedInInstr.end(); I != E; ++I)
977  MRI->setRegUnitUsed(*I);
978 
979  // Track registers defined by instruction - early clobbers and tied uses at
980  // this point.
981  UsedInInstr.clear();
982  if (hasEarlyClobbers) {
983  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
984  MachineOperand &MO = MI->getOperand(i);
985  if (!MO.isReg()) continue;
986  unsigned Reg = MO.getReg();
987  if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
988  // Look for physreg defs and tied uses.
989  if (!MO.isDef() && !MI->isRegTiedToDefOperand(i)) continue;
990  markRegUsedInInstr(Reg);
991  }
992  }
993 
994  unsigned DefOpEnd = MI->getNumOperands();
995  if (MI->isCall()) {
996  // Spill all virtregs before a call. This serves two purposes: 1. If an
997  // exception is thrown, the landing pad is going to expect to find
998  // registers in their spill slots, and 2. we don't have to wade through
999  // all the <imp-def> operands on the call instruction.
1000  DefOpEnd = VirtOpEnd;
1001  DEBUG(dbgs() << " Spilling remaining registers before call.\n");
1002  spillAll(MI);
1003 
1004  // The imp-defs are skipped below, but we still need to mark those
1005  // registers as used by the function.
1006  SkippedInstrs.insert(&MCID);
1007  }
1008 
1009  // Third scan.
1010  // Allocate defs and collect dead defs.
1011  for (unsigned i = 0; i != DefOpEnd; ++i) {
1012  MachineOperand &MO = MI->getOperand(i);
1013  if (!MO.isReg() || !MO.isDef() || !MO.getReg() || MO.isEarlyClobber())
1014  continue;
1015  unsigned Reg = MO.getReg();
1016 
1017  if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
1018  if (!MRI->isAllocatable(Reg)) continue;
1019  definePhysReg(MI, Reg, (MO.isImplicit() || MO.isDead()) ?
1020  regFree : regReserved);
1021  continue;
1022  }
1023  LiveRegMap::iterator LRI = defineVirtReg(MI, i, Reg, CopySrc);
1024  unsigned PhysReg = LRI->PhysReg;
1025  if (setPhysReg(MI, i, PhysReg)) {
1026  VirtDead.push_back(Reg);
1027  CopyDst = 0; // cancel coalescing;
1028  } else
1029  CopyDst = (CopyDst == Reg || CopyDst == PhysReg) ? PhysReg : 0;
1030  }
1031 
1032  // Kill dead defs after the scan to ensure that multiple defs of the same
1033  // register are allocated identically. We didn't need to do this for uses
1034  // because we are creating our own kill flags, and they are always at the
1035  // last use.
1036  for (unsigned i = 0, e = VirtDead.size(); i != e; ++i)
1037  killVirtReg(VirtDead[i]);
1038  VirtDead.clear();
1039 
1040  for (UsedInInstrSet::iterator
1041  I = UsedInInstr.begin(), E = UsedInInstr.end(); I != E; ++I)
1042  MRI->setRegUnitUsed(*I);
1043 
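  // A COPY whose source and destination were assigned the same physical
  // register (with matching subregister indices) is now redundant; remember
  // it so it can be erased once the whole block has been allocated.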
1044  if (CopyDst && CopyDst == CopySrc && CopyDstSub == CopySrcSub) {
1045  DEBUG(dbgs() << "-- coalescing: " << *MI);
1046  Coalesced.push_back(MI);
1047  } else {
1048  DEBUG(dbgs() << "<< " << *MI);
1049  }
1050  }
1051 
1052  // Spill all physical registers holding virtual registers now.
1053  DEBUG(dbgs() << "Spilling live registers at end of block.\n");
1054  spillAll(MBB->getFirstTerminator());
1055 
1056  // Erase all the coalesced copies. We are delaying it until now because
1057  // LiveVirtRegs might refer to the instrs.
1058  for (unsigned i = 0, e = Coalesced.size(); i != e; ++i)
1059  MBB->erase(Coalesced[i]);
1060  NumCopies += Coalesced.size();
1061 
1062  DEBUG(MBB->dump());
1063 }
1064 
1065 /// runOnMachineFunction - Register allocate the whole function
1066 ///
1067 bool RAFast::runOnMachineFunction(MachineFunction &Fn) {
1068  DEBUG(dbgs() << "********** FAST REGISTER ALLOCATION **********\n"
1069  << "********** Function: " << Fn.getName() << '\n');
1070  MF = &Fn;
1071  MRI = &MF->getRegInfo();
1072  TM = &Fn.getTarget();
1073  TRI = TM->getRegisterInfo();
1074  TII = TM->getInstrInfo();
1075  MRI->freezeReservedRegs(Fn);
1076  RegClassInfo.runOnMachineFunction(Fn);
1077  UsedInInstr.clear();
1078  UsedInInstr.setUniverse(TRI->getNumRegUnits());
1079 
1080  assert(!MRI->isSSA() && "regalloc requires leaving SSA");
1081 
1082  // initialize the virtual->physical register map to have a 'null'
1083  // mapping for all virtual registers
1084  StackSlotForVirtReg.resize(MRI->getNumVirtRegs());
1085  LiveVirtRegs.setUniverse(MRI->getNumVirtRegs());
1086 
1087  // Loop over all of the basic blocks, eliminating virtual register references
1088  for (MachineFunction::iterator MBBi = Fn.begin(), MBBe = Fn.end();
1089  MBBi != MBBe; ++MBBi) {
1090  MBB = &*MBBi;
1091  AllocateBasicBlock();
1092  }
1093 
1094  // Add the clobber lists for all the instructions we skipped earlier.
1095  for (SmallPtrSet<const MCInstrDesc*, 4>::const_iterator
1096  I = SkippedInstrs.begin(), E = SkippedInstrs.end(); I != E; ++I)
1097  if (const uint16_t *Defs = (*I)->getImplicitDefs())
1098  while (*Defs)
1099  MRI->setPhysRegUsed(*Defs++);
1100 
1101  // All machine operands and other references to virtual registers have been
1102  // replaced. Remove the virtual registers.
1103  MRI->clearVirtRegs();
1104 
1105  SkippedInstrs.clear();
1106  StackSlotForVirtReg.clear();
1107  LiveDbgValueMap.clear();
1108  return true;
1109 }
1110 
1111 FunctionPass *llvm::createFastRegisterAllocator() {
1112  return new RAFast();
1113 }