#define DEBUG_TYPE "jit"
STATISTIC(NumBytes, "Number of bytes of machine code compiled");
STATISTIC(NumRelos, "Number of relocations applied");
STATISTIC(NumRetries, "Number of retries with more memory");
class JITResolverState;
template<typename ValueTy>
struct NoRAUWValueMapConfig : public ValueMapConfig<ValueTy> {
  typedef JITResolverState *ExtraData;
  static void onRAUW(JITResolverState *, Value *Old, Value *New) {
    llvm_unreachable("The JIT doesn't know how to handle a"
                     " RAUW on a value it has emitted.");
  }
};
struct CallSiteValueMapConfig : public NoRAUWValueMapConfig<Function*> {
  typedef JITResolverState *ExtraData;
  static void onDelete(JITResolverState *JRS, Function *F);
};
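// These ValueMap configs let the resolver state react to IR-level events on
// the Functions it tracks: deleting a Function triggers onDelete, which tears
// down any lazy call sites emitted for it, while replaceAllUsesWith on an
// already-emitted value is treated as unsupported and aborts.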
class JITResolverState {
public:
  typedef ValueMap<Function*, void*, NoRAUWValueMapConfig<Function*> >
    FunctionToLazyStubMapTy;
  typedef std::map<void*, AssertingVH<Function> > CallSiteToFunctionMapTy;
  typedef ValueMap<Function*, SmallPtrSet<void*, 1>,
                   CallSiteValueMapConfig> FunctionToCallSitesMapTy;
  typedef std::map<AssertingVH<GlobalValue>, void*> GlobalToIndirectSymMapTy;

private:
  // One lazy stub per Function, so stubs can be reused.
  FunctionToLazyStubMapTy FunctionToLazyStubMap;
  // Bidirectional bookkeeping between lazy call sites and their Functions.
  CallSiteToFunctionMapTy CallSiteToFunctionMap;
  FunctionToCallSitesMapTy FunctionToCallSitesMap;
  // One indirect symbol per GlobalValue, so indirect symbols can be reused.
  GlobalToIndirectSymMapTy GlobalToIndirectSymMap;

  // The JIT whose lock guards this state (used by the assertions below).
  JIT *TheJIT;

public:
  JITResolverState(JIT *jit)
      : FunctionToLazyStubMap(this), FunctionToCallSitesMap(this) {
    TheJIT = jit;
  }
  FunctionToLazyStubMapTy& getFunctionToLazyStubMap(const MutexGuard& locked) {
    assert(locked.holds(TheJIT->lock));
    return FunctionToLazyStubMap;
  }

  GlobalToIndirectSymMapTy& getGlobalToIndirectSymMap(const MutexGuard& lck) {
    assert(lck.holds(TheJIT->lock));
    return GlobalToIndirectSymMap;
  }
  // Given the address of a lazy call site, find the Function it was emitted
  // for.
  std::pair<void *, Function *> LookupFunctionFromCallSite(
      const MutexGuard &locked, void *CallSite) const {
    assert(locked.holds(TheJIT->lock));

    // The address we are handed may point slightly past the start of the call
    // site, so use upper_bound and step back to the preceding entry.
    CallSiteToFunctionMapTy::const_iterator I =
      CallSiteToFunctionMap.upper_bound(CallSite);
    assert(I != CallSiteToFunctionMap.begin() &&
           "This is not a known call site!");
    --I;
    return *I;
  }
  void AddCallSite(const MutexGuard &locked, void *CallSite, Function *F) {
    assert(locked.holds(TheJIT->lock));

    bool Inserted = CallSiteToFunctionMap.insert(
        std::make_pair(CallSite, F)).second;
    (void)Inserted;
    assert(Inserted && "Pair was already in CallSiteToFunctionMap");
    FunctionToCallSitesMap[F].insert(CallSite);
  }
  void EraseAllCallSitesForPrelocked(Function *F);
  void EraseAllCallSitesPrelocked();
};
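// JITResolverState is pure bookkeeping: which lazy stub belongs to which
// Function, which call sites were emitted for it, and which indirect symbol
// shadows which GlobalValue. Every accessor asserts that the caller already
// holds the JIT's lock, so the maps themselves need no extra synchronization.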
class JITResolver {
  typedef JITResolverState::CallSiteToFunctionMapTy CallSiteToFunctionMapTy;
  typedef JITResolverState::GlobalToIndirectSymMapTy GlobalToIndirectSymMapTy;

  // The target's lazy resolver entry point; new lazy stubs initially call it.
  TargetJITInfo::LazyResolverFn LazyResolverFn;

  JITResolverState state;

  // Stubs for external functions, keyed by the function's real address.
  std::map<void*, void*> ExternalFnToStubMap;
  // Reverse map from addresses to their index in the GOT.
  std::map<void*, unsigned> revGOTMap;
  unsigned nextGOTIndex;

  JITEmitter &JE;
  JIT *TheJIT;

public:
  explicit JITResolver(JIT &jit, JITEmitter &je)
    : state(&jit), nextGOTIndex(0), JE(je), TheJIT(&jit) {
    LazyResolverFn = jit.getJITInfo().getLazyResolverFunction(JITCompilerFn);
  }
  ~JITResolver();
  void *getLazyFunctionStubIfAvailable(Function *F);
  void *getLazyFunctionStub(Function *F);
  void *getExternalFunctionStub(void *FnAddr);
  void *getGlobalValueIndirectSym(GlobalValue *V, void *GVAddress);
  unsigned getGOTIndexForAddr(void *addr);
  static void *JITCompilerFn(void *Stub);
};
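// Lazy compilation, end to end: getLazyFunctionStub() emits a small stub whose
// target is the target-specific lazy resolver and records the stub as a call
// site. When the stub is first executed, control reaches JITCompilerFn(),
// which maps the stub address back to its Function, compiles it, and returns
// the real entry point; the target's resolver then redirects future calls
// there.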
class StubToResolverMapTy {
  // Map a stub address to the JITResolver that emitted it.
  std::map<void*, JITResolver*> Map;

public:
  void RegisterStubResolver(void *Stub, JITResolver *Resolver) {
    Map.insert(std::make_pair(Stub, Resolver));
  }

  void UnregisterStubResolver(void *Stub) {
    Map.erase(Stub);
  }

  JITResolver *getResolverFromStub(void *Stub) const {
    // The address given to us for the stub may not be exactly right; it might
    // be a little bit after the stub.  As such, use upper_bound to find it.
    std::map<void*, JITResolver*>::const_iterator I = Map.upper_bound(Stub);
    assert(I != Map.begin() && "This is not a known stub!");
    --I;
    return I->second;
  }

  // True if any registered stub still refers to the given resolver.
  bool ResolverHasStubs(JITResolver* Resolver) const {
    for (std::map<void*, JITResolver*>::const_iterator I = Map.begin(),
         E = Map.end(); I != E; ++I) {
      if (I->second == Resolver)
        return true;
    }
    return false;
  }
};

// Lazy call stubs have no context beyond their own address, so the map from
// stubs to resolvers has to be globally reachable.
static ManagedStatic<StubToResolverMapTy> StubToResolverMap;
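// The stub-to-resolver registry exists because JITCompilerFn is a static
// callback: when a stub fires, all the callback receives is the stub's
// address, so it needs a global table to recover the JITResolver (and thus
// the JIT instance) that created it. Entries are added when a lazy stub is
// emitted and removed when the corresponding call sites are erased;
// ~JITResolver asserts the registry holds no leftover stubs for the dying
// resolver.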
class JITEmitter : public JITCodeEmitter {
  JITMemoryManager *MemMgr;

  // Saved emitter buffer state while a stub is emitted into its own buffer.
  uint8_t *SavedBufferBegin, *SavedBufferEnd, *SavedCurBufferPtr;
  // Non-zero when retrying a function that previously ran out of space.
  uintptr_t SizeEstimate;
  // Relocations requested by the function as it was emitted.
  std::vector<MachineRelocation> Relocations;
  // Mapping from MBB number to the address it was emitted at.
  std::vector<uintptr_t> MBBLocations;
  // Address of the first emitted constant pool entry.
  void *ConstantPoolBase;
  // Bookkeeping for lazily compiled and external functions.
  JITResolver Resolver;

  struct EmittedCode {
    void *FunctionBody;  // Start of the function's allocation.
    void *Code;          // Address the function's code actually starts at.
    void *ExceptionTable;
    EmittedCode() : FunctionBody(0), Code(0), ExceptionTable(0) {}
  };
  struct EmittedFunctionConfig : public ValueMapConfig<const Function*> {
    typedef JITEmitter *ExtraData;
    static void onDelete(JITEmitter *, const Function*);
    static void onRAUW(JITEmitter *, const Function*, const Function*);
  };
  ValueMap<const Function *, EmittedCode,
           EmittedFunctionConfig> EmittedFunctions;
public:
  JITEmitter(JIT &jit, JITMemoryManager *JMM, TargetMachine &tm)
    : SizeEstimate(0), Resolver(jit, *this), MMI(0), CurFn(0),
      EmittedFunctions(this), TheJIT(&jit) {
    MemMgr = JMM ? JMM : JITMemoryManager::CreateDefaultMemManager();
    if (jit.getJITInfo().needsGOT()) {
      MemMgr->AllocateGOT();
      DEBUG(dbgs() << "JIT is managing a GOT\n");
    }
  }
  JITResolver &getJITResolver() { return Resolver; }

  virtual void startGVStub(const GlobalValue* GV,
                           unsigned StubSize, unsigned Alignment = 1);
  void startGVStub(void *Buffer, unsigned StubSize);
  virtual void *allocIndirectGV(const GlobalValue *GV,
                                const uint8_t *Buffer, size_t Size,
                                unsigned Alignment);

  virtual void *allocateSpace(uintptr_t Size, unsigned Alignment);
  virtual void *allocateGlobal(uintptr_t Size, unsigned Alignment);
  virtual void addRelocation(const MachineRelocation &MR) {
    Relocations.push_back(MR);
  }

  virtual void StartMachineBasicBlock(MachineBasicBlock *MBB) {
    if (MBBLocations.size() <= (unsigned)MBB->getNumber())
      MBBLocations.resize((MBB->getNumber()+1)*2);
    MBBLocations[MBB->getNumber()] = getCurrentPCValue();
    if (MBB->hasAddressTaken())
      TheJIT->addPointerToBasicBlock(MBB->getBasicBlock(),
                                     (void*)getCurrentPCValue());
    DEBUG(dbgs() << "JIT: Emitting BB" << MBB->getNumber() << " at ["
                 << (void*) getCurrentPCValue() << "]\n");
  }
  virtual uintptr_t getConstantPoolEntryAddress(unsigned Entry) const;
  virtual uintptr_t getJumpTableEntryAddress(unsigned Entry) const;

  virtual uintptr_t getMachineBasicBlockAddress(MachineBasicBlock *MBB) const {
    assert(MBBLocations.size() > (unsigned)MBB->getNumber() &&
           MBBLocations[MBB->getNumber()] && "MBB not emitted!");
    return MBBLocations[MBB->getNumber()];
  }
  void deallocateMemForFunction(const Function *F);

  virtual void processDebugLoc(DebugLoc DL, bool BeforePrintingInsn);

  virtual void emitLabel(MCSymbol *Label) {
    LabelLocations[Label] = getCurrentPCValue();
  }

  virtual DenseMap<MCSymbol*, uintptr_t> *getLabelLocations() {
    return &LabelLocations;
  }

  virtual uintptr_t getLabelAddress(MCSymbol *Label) const {
    assert(LabelLocations.count(Label) && "Label not emitted!");
    return LabelLocations.find(Label)->second;
  }
  void *getPointerToGlobal(GlobalValue *GV, void *Reference,
                           bool MayNeedFarStub);
  void *getPointerToGVIndirectSym(GlobalValue *V, void *Reference);
};
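// JITEmitter is the glue between codegen and the JITMemoryManager: it asks the
// memory manager for a buffer (startFunctionBody/endFunctionBody), lets the
// target write instructions into it, resolves relocations with the help of
// JITResolver, and finally flips the memory from writable to executable.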
void CallSiteValueMapConfig::onDelete(JITResolverState *JRS, Function *F) {
  JRS->EraseAllCallSitesForPrelocked(F);
}

void JITResolverState::EraseAllCallSitesForPrelocked(Function *F) {
  FunctionToCallSitesMapTy::iterator F2C = FunctionToCallSitesMap.find(F);
  if (F2C == FunctionToCallSitesMap.end())
    return;
  StubToResolverMapTy &S2RMap = *StubToResolverMap;
  for (SmallPtrSet<void*, 1>::const_iterator I = F2C->second.begin(),
       E = F2C->second.end(); I != E; ++I) {
    S2RMap.UnregisterStubResolver(*I);
    bool Erased = CallSiteToFunctionMap.erase(*I);
    (void)Erased;
    assert(Erased && "Missing call site->function mapping");
  }
  FunctionToCallSitesMap.erase(F2C);
}
void JITResolverState::EraseAllCallSitesPrelocked() {
  StubToResolverMapTy &S2RMap = *StubToResolverMap;
  for (CallSiteToFunctionMapTy::const_iterator
         I = CallSiteToFunctionMap.begin(),
         E = CallSiteToFunctionMap.end(); I != E; ++I) {
    S2RMap.UnregisterStubResolver(I->first);
  }
  CallSiteToFunctionMap.clear();
  FunctionToCallSitesMap.clear();
}
JITResolver::~JITResolver() {
  state.EraseAllCallSitesPrelocked();
  assert(!StubToResolverMap->ResolverHasStubs(this) &&
         "Resolver destroyed with stubs still alive.");
}
void *JITResolver::getLazyFunctionStubIfAvailable(Function *F) {
  MutexGuard locked(TheJIT->lock);
  return state.getFunctionToLazyStubMap(locked).lookup(F);
}
void *JITResolver::getLazyFunctionStub(Function *F) {
  MutexGuard locked(TheJIT->lock);

  // If we already have a lazy stub for this function, recycle it.
  void *&Stub = state.getFunctionToLazyStubMap(locked)[F];
  if (Stub) return Stub;

  // When compiling lazily, the stub initially targets the lazy resolver;
  // otherwise it has no target until the real address is known.
  void *Actual = TheJIT->isCompilingLazily()
    ? (void *)(intptr_t)LazyResolverFn : (void *)0;

  // For external declarations, try to resolve the real address now so it can
  // be placed in the stub directly.
  if (isNonGhostDeclaration(F) || F->hasAvailableExternallyLinkage()) {
    Actual = TheJIT->getPointerToFunction(F);

    // If the symbol resolved to a null address (e.g. a weak external), don't
    // emit a stub; return null to the application.
    if (!Actual) return 0;
  }

  // Codegen a new stub, calling the lazy resolver or the actual address of
  // the external function, if it was resolved.
  Stub = TheJIT->getJITInfo().emitFunctionStub(F, Actual, JE);

  if (Actual != (void*)(intptr_t)LazyResolverFn) {
    // If this is a stub for an external function, the JIT's global mapping
    // should point at the stub, not at the external function itself.
    TheJIT->updateGlobalMapping(F, Stub);
  }

  DEBUG(dbgs() << "JIT: Lazy stub emitted at [" << Stub << "] for function '"
        << F->getName() << "'\n");

  if (TheJIT->isCompilingLazily()) {
    // Register this JITResolver as the one corresponding to this call site so
    // JITCompilerFn will be able to find it.
    StubToResolverMap->RegisterStubResolver(Stub, this);

    // Keep track of the stub-to-Function mapping so JITCompilerFn knows which
    // function to compile.
    state.AddCallSite(locked, Stub, F);
  } else if (!Actual) {
    // JIT'ing non-lazily, but the callee does not exist yet: queue it so the
    // stub address can be filled in later.
    assert(!isNonGhostDeclaration(F) && !F->hasAvailableExternallyLinkage() &&
           "'Actual' should have been set above.");
    TheJIT->addPendingFunction(F);
  }

  return Stub;
}
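// Note the two regimes above: when compiling lazily, the stub initially
// targets LazyResolverFn and is registered as a call site so JITCompilerFn can
// find it; when lazy compilation is disabled, the stub either jumps straight
// to the resolved address or the function is queued via addPendingFunction()
// so the stub can be rewritten once the body exists (see
// JIT::updateFunctionStub below).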
void *JITResolver::getGlobalValueIndirectSym(GlobalValue *GV, void *GVAddress) {
  MutexGuard locked(TheJIT->lock);

  // If we already have an indirect symbol for this global, recycle it.
  void *&IndirectSym = state.getGlobalToIndirectSymMap(locked)[GV];
  if (IndirectSym) return IndirectSym;

  // Otherwise, codegen a new indirect symbol.
  IndirectSym = TheJIT->getJITInfo().emitGlobalValueIndirectSym(GV, GVAddress,
                                                                JE);

  DEBUG(dbgs() << "JIT: Indirect symbol emitted at [" << IndirectSym
        << "] for GV '" << GV->getName() << "'\n");

  return IndirectSym;
}
void *JITResolver::getExternalFunctionStub(void *FnAddr) {
  // Do we already have a stub for this external function?
  void *&Stub = ExternalFnToStubMap[FnAddr];
  if (Stub) return Stub;

  Stub = TheJIT->getJITInfo().emitFunctionStub(0, FnAddr, JE);

  DEBUG(dbgs() << "JIT: Stub emitted at [" << Stub
        << "] for external function at '" << FnAddr << "'\n");
  return Stub;
}
unsigned JITResolver::getGOTIndexForAddr(void* addr) {
  unsigned idx = revGOTMap[addr];
  if (!idx) {
    // First time this address is seen: hand out the next free GOT slot.
    idx = ++nextGOTIndex;
    revGOTMap[addr] = idx;
    DEBUG(dbgs() << "JIT: Adding GOT entry " << idx << " for addr ["
                 << addr << "]\n");
  }
  return idx;
}
void *JITResolver::JITCompilerFn(void *Stub) {
  JITResolver *JR = StubToResolverMap->getResolverFromStub(Stub);
  assert(JR && "Unable to find the corresponding JITResolver to the call site");

  Function *F = 0;
  void *ActualPtr = 0;

  {
    // Only hold the JIT lock while mapping the stub back to its Function;
    // compiling below may need to take the lock again.
    MutexGuard locked(JR->TheJIT->lock);

    std::pair<void*, Function*> I =
      JR->state.LookupFunctionFromCallSite(locked, Stub);
    F = I.second;
    ActualPtr = I.first;
  }

  // If the function has already been code-generated, just reuse that address.
  void *Result = JR->TheJIT->getPointerToGlobalIfAvailable(F);

  if (!Result) {
    // Otherwise compile it now, which is a fatal error when lazy compilation
    // has been disabled.
    if (!JR->TheJIT->isCompilingLazily()) {
      report_fatal_error("LLVM JIT requested to do lazy compilation of"
                         " function '"
                         + F->getName() + "' when lazy compiles are disabled!");
    }

    DEBUG(dbgs() << "JIT: Lazily resolving function '" << F->getName()
          << "' In stub ptr = " << Stub << " actual ptr = "
          << ActualPtr << "\n");

    Result = JR->TheJIT->getPointerToFunction(F);
  }

  // If the stub had a GOT entry, carry it over to the resolved address.
  if (JR->revGOTMap.find(Stub) != JR->revGOTMap.end())
    JR->revGOTMap[Result] = JR->revGOTMap[Stub];

  return Result;
}
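// Note that the call site is not erased from CallSiteToFunctionMap here: other
// threads may already be waiting on the same stub and still need the lookup to
// succeed once they get the lock. Only the GOT bookkeeping is migrated from
// the stub address to the freshly compiled code.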
void *JITEmitter::getPointerToGlobal(GlobalValue *V, void *Reference,
                                     bool MayNeedFarStub) {
  if (GlobalVariable *GV = dyn_cast<GlobalVariable>(V))
    return TheJIT->getOrEmitGlobalVariable(GV);
  if (GlobalAlias *GA = dyn_cast<GlobalAlias>(V))
    return TheJIT->getPointerToGlobal(GA->resolveAliasedGlobal(false));

  // Return an existing stub first so every call site sees the same address.
  Function *F = cast<Function>(V);
  void *FnStub = Resolver.getLazyFunctionStubIfAvailable(F);
  if (FnStub) return FnStub;

  if (!MayNeedFarStub) {
    // If we already have compiled code, return it directly.
    void *ResultPtr = TheJIT->getPointerToGlobalIfAvailable(F);
    if (ResultPtr) return ResultPtr;

    // External declarations can be resolved to their real address now.
    if (isNonGhostDeclaration(F) || F->hasAvailableExternallyLinkage())
      return TheJIT->getPointerToFunction(F);
  }

  // Otherwise, conservatively emit (or reuse) a lazy stub.
  return Resolver.getLazyFunctionStub(F);
}
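// MayNeedFarStub reflects whether the requesting relocation can only encode a
// nearby target (for example a short call displacement). When it is false the
// emitter may answer with the real address; when it is true it conservatively
// answers with a stub, which is always within reach and can forward anywhere.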
void *JITEmitter::getPointerToGVIndirectSym(GlobalValue *V, void *Reference) {
  // Make sure GV is emitted first, and acquire an indirect symbol pointing
  // at it.
  void *GVAddress = getPointerToGlobal(V, Reference, false);
  void *StubAddr = Resolver.getGlobalValueIndirectSym(V, GVAddress);
  return StubAddr;
}
void JITEmitter::processDebugLoc(DebugLoc DL, bool BeforePrintingInsn) {
  if (DL.isUnknown()) return;
  if (!BeforePrintingInsn) return;

  const LLVMContext &Context = EmissionDetails.MF->getFunction()->getContext();

  if (DL.getScope(Context) != 0 && PrevDL != DL) {
    // Remember where this source line first starts in the emitted code so
    // listeners can build line tables.
    JITEvent_EmittedFunctionDetails::LineStart NextLine;
    NextLine.Address = getCurrentPCValue();
    NextLine.Loc = DL;
    EmissionDetails.LineStarts.push_back(NextLine);
  }

  PrevDL = DL;
}
static unsigned GetConstantPoolSizeInBytes(MachineConstantPool *MCP,
                                           const DataLayout *TD) {
  const std::vector<MachineConstantPoolEntry> &Constants = MCP->getConstants();
  if (Constants.empty()) return 0;

  unsigned Size = 0;
  for (unsigned i = 0, e = Constants.size(); i != e; ++i) {
    MachineConstantPoolEntry CPE = Constants[i];
    // Round the running size up to this entry's alignment, then add its size.
    unsigned AlignMask = CPE.getAlignment() - 1;
    Size = (Size + AlignMask) & ~AlignMask;
    Size += TD->getTypeAllocSize(CPE.getType());
  }
  return Size;
}
void JITEmitter::startFunction(MachineFunction &F) {
  DEBUG(dbgs() << "JIT: Starting CodeGen of Function "
        << F.getName() << "\n");

  uintptr_t ActualSize = 0;
  // Set the memory writable, if it's not already.
  MemMgr->setMemoryWritable();

  if (SizeEstimate > 0) {
    // SizeEstimate will be non-zero on reallocation attempts.
    ActualSize = SizeEstimate;
  }

  BufferBegin = CurBufferPtr = MemMgr->startFunctionBody(F.getFunction(),
                                                         ActualSize);
  BufferEnd = BufferBegin+ActualSize;
  EmittedFunctions[F.getFunction()].FunctionBody = BufferBegin;

  emitConstantPool(F.getConstantPool());
  if (MachineJumpTableInfo *MJTI = F.getJumpTableInfo())
    initJumpTableInfo(MJTI);

  // About to start emitting the machine code for the function.
  TheJIT->updateGlobalMapping(F.getFunction(), CurBufferPtr);
  EmittedFunctions[F.getFunction()].Code = CurBufferPtr;

  MBBLocations.clear();

  EmissionDetails.MF = &F;
  EmissionDetails.LineStarts.clear();
}
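// Memory sizing protocol: the first attempt passes ActualSize == 0 and lets
// the memory manager pick a default-sized block. If emission runs off the end
// of that block, finishFunction() calls retryWithMoreMemory(), which records a
// doubled SizeEstimate so the next startFunction() asks for a bigger buffer.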
bool JITEmitter::finishFunction(MachineFunction &F) {
  if (CurBufferPtr == BufferEnd) {
    // Ran out of space; endFunctionBody must still be called before retrying.
    MemMgr->endFunctionBody(F.getFunction(), BufferBegin, CurBufferPtr);
    retryWithMoreMemory(F);
    return false;
  }

  if (MachineJumpTableInfo *MJTI = F.getJumpTableInfo())
    emitJumpTableInfo(MJTI);

  // FnStart is the start of the text, not of per-function data emitted before.
  uint8_t *FnStart =
    (uint8_t *)TheJIT->getPointerToGlobalIfAvailable(F.getFunction());
  uint8_t *FnEnd = CurBufferPtr;

  if (!Relocations.empty()) {
    NumRelos += Relocations.size();

    // Resolve each requested relocation to a concrete pointer.
    for (unsigned i = 0, e = Relocations.size(); i != e; ++i) {
      MachineRelocation &MR = Relocations[i];
      void *ResultPtr = 0;
      if (!MR.letTargetResolve()) {
        if (MR.isExternalSymbol()) {
          ResultPtr = TheJIT->getPointerToNamedFunction(MR.getExternalSymbol(),
                                                        false);
          DEBUG(dbgs() << "JIT: Map '" << MR.getExternalSymbol() << "' to ["
                       << ResultPtr << "]\n");

          // If the target really wants a stub for this symbol, emit it now.
          if (MR.mayNeedFarStub())
            ResultPtr = Resolver.getExternalFunctionStub(ResultPtr);
        } else if (MR.isGlobalValue()) {
          ResultPtr = getPointerToGlobal(MR.getGlobalValue(),
                                         BufferBegin+MR.getMachineCodeOffset(),
                                         MR.mayNeedFarStub());
        } else if (MR.isIndirectSymbol()) {
          ResultPtr = getPointerToGVIndirectSym(
              MR.getGlobalValue(), BufferBegin+MR.getMachineCodeOffset());
        } else if (MR.isBasicBlock()) {
          ResultPtr = (void*)getMachineBasicBlockAddress(MR.getBasicBlock());
        } else if (MR.isConstantPoolIndex()) {
          ResultPtr =
            (void*)getConstantPoolEntryAddress(MR.getConstantPoolIndex());
        } else {
          assert(MR.isJumpTableIndex());
          ResultPtr = (void*)getJumpTableEntryAddress(MR.getJumpTableIndex());
        }

        MR.setResultPointer(ResultPtr);
      }

      // If we are managing the GOT and the relocation wants an index, give it
      // one, refreshing the slot if it is stale.
      if (MR.isGOTRelative() && MemMgr->isManagingGOT()) {
        unsigned idx = Resolver.getGOTIndexForAddr(ResultPtr);
        MR.setGOTIndex(idx);
        if (((void**)MemMgr->getGOTBase())[idx] != ResultPtr) {
          DEBUG(dbgs() << "JIT: GOT was out of date for " << ResultPtr
                       << " pointing at " << ((void**)MemMgr->getGOTBase())[idx]
                       << "\n");
          ((void**)MemMgr->getGOTBase())[idx] = ResultPtr;
        }
      }
    }

    TheJIT->getJITInfo().relocate(BufferBegin, &Relocations[0],
                                  Relocations.size(), MemMgr->getGOTBase());
  }
  if (MemMgr->isManagingGOT()) {
    unsigned idx = Resolver.getGOTIndexForAddr((void*)BufferBegin);
    if (((void**)MemMgr->getGOTBase())[idx] != (void*)BufferBegin) {
      DEBUG(dbgs() << "JIT: GOT was out of date for " << (void*)BufferBegin
                   << " pointing at " << ((void**)MemMgr->getGOTBase())[idx]
                   << "\n");
      ((void**)MemMgr->getGOTBase())[idx] = (void*)BufferBegin;
    }
  }
  MemMgr->endFunctionBody(F.getFunction(), BufferBegin, CurBufferPtr);

  if (CurBufferPtr == BufferEnd) {
    retryWithMoreMemory(F);
    return false;
  } else {
    // Emission succeeded; reset the size estimate for the next function.
    SizeEstimate = 0;
  }

  BufferBegin = CurBufferPtr = 0;
  NumBytes += FnEnd-FnStart;

  TheJIT->NotifyFunctionEmitted(*F.getFunction(), FnStart, FnEnd-FnStart,
                                EmissionDetails);

  DEBUG(dbgs() << "JIT: Finished CodeGen of [" << (void*)FnStart
        << "] Function: " << F.getName()
        << ": " << (FnEnd-FnStart) << " bytes of text, "
        << Relocations.size() << " relocations\n");

  Relocations.clear();
  ConstPoolAddresses.clear();
  // Mark the code region executable now that emission is done.
  MemMgr->setMemoryExecutable();

  DEBUG({
      if (sys::hasDisassembler()) {
        dbgs() << "JIT: Disassembled code:\n";
        dbgs() << sys::disassembleBuffer(FnStart, FnEnd-FnStart,
                                         (uintptr_t)FnStart);
      } else {
        // No disassembler available: dump the raw bytes, four per group.
        dbgs() << "JIT: Binary code:\n";
        uint8_t* q = FnStart;
        for (int i = 0; q < FnEnd; q += 4, ++i) {
          dbgs() << "JIT: " << (long)(q - FnStart) << ": ";
          for (int j = 3; j >= 0; --j) {
            if (q + j < FnEnd)
              dbgs() << (unsigned short)q[j];
          }
          dbgs() << '\n';
        }
      }
    });

  return true;
}
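// At this point the function is live: relocations have been resolved (using
// stubs or indirect symbols where the target cannot reach the real address),
// the GOT entry for the function points at the new code, the buffer has been
// handed back to the memory manager and marked executable, and listeners have
// been told where the code lives via NotifyFunctionEmitted().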
void JITEmitter::retryWithMoreMemory(MachineFunction &F) {
  DEBUG(dbgs() << "JIT: Ran out of space for native code. Reattempting.\n");
  Relocations.clear();  // Clear the old relocations or we'll reapply them.
  ConstPoolAddresses.clear();
  ++NumRetries;
  deallocateMemForFunction(F.getFunction());
  // Try again with at least twice as much free space.
  SizeEstimate = (uintptr_t)(2 * (BufferEnd - BufferBegin));
}
void JITEmitter::deallocateMemForFunction(const Function *F) {
  ValueMap<const Function *, EmittedCode, EmittedFunctionConfig>::iterator
    Emitted = EmittedFunctions.find(F);
  if (Emitted != EmittedFunctions.end()) {
    MemMgr->deallocateFunctionBody(Emitted->second.FunctionBody);
    TheJIT->NotifyFreeingMachineCode(Emitted->second.Code);
    EmittedFunctions.erase(Emitted);
  }
}
void *JITEmitter::allocateSpace(uintptr_t Size, unsigned Alignment) {
  // If a function buffer is active, carve the space out of it; otherwise go
  // straight to the memory manager.
  if (BufferBegin)
    return JITCodeEmitter::allocateSpace(Size, Alignment);

  BufferBegin = CurBufferPtr = MemMgr->allocateSpace(Size, Alignment);
  BufferEnd = BufferBegin+Size;
  return CurBufferPtr;
}
void *JITEmitter::allocateGlobal(uintptr_t Size, unsigned Alignment) {
  return MemMgr->allocateGlobal(Size, Alignment);
}
void JITEmitter::emitConstantPool(MachineConstantPool *MCP) {
  if (TheJIT->getJITInfo().hasCustomConstantPool())
    return;

  const std::vector<MachineConstantPoolEntry> &Constants = MCP->getConstants();
  if (Constants.empty()) return;

  unsigned Size = GetConstantPoolSizeInBytes(MCP, TheJIT->getDataLayout());
  unsigned Align = MCP->getConstantPoolAlignment();
  ConstantPoolBase = allocateSpace(Size, Align);
  ConstantPool = MCP;

  if (ConstantPoolBase == 0) return;  // Buffer overflow.

  DEBUG(dbgs() << "JIT: Emitted constant pool at [" << ConstantPoolBase
               << "] (size: " << Size << ", alignment: " << Align << ")\n");

  // Initialize the memory for all of the constant pool entries.
  unsigned Offset = 0;
  for (unsigned i = 0, e = Constants.size(); i != e; ++i) {
    MachineConstantPoolEntry CPE = Constants[i];
    unsigned AlignMask = CPE.getAlignment() - 1;
    Offset = (Offset + AlignMask) & ~AlignMask;

    uintptr_t CAddr = (uintptr_t)ConstantPoolBase + Offset;
    ConstPoolAddresses.push_back(CAddr);
    if (CPE.isMachineConstantPoolEntry()) {
      report_fatal_error("Initialize memory with machine specific constant pool"
                         "entry has not been implemented!");
    }
    TheJIT->InitializeMemory(CPE.Val.ConstVal, (void*)CAddr);
    DEBUG(dbgs() << "JIT: CP" << i << " at [0x";
          dbgs().write_hex(CAddr) << "]\n");

    Type *Ty = CPE.Val.ConstVal->getType();
    Offset += TheJIT->getDataLayout()->getTypeAllocSize(Ty);
  }
}
void JITEmitter::initJumpTableInfo(MachineJumpTableInfo *MJTI) {
  if (TheJIT->getJITInfo().hasCustomJumpTables())
    return;

  const std::vector<MachineJumpTableEntry> &JT = MJTI->getJumpTables();
  if (JT.empty()) return;

  unsigned NumEntries = 0;
  for (unsigned i = 0, e = JT.size(); i != e; ++i)
    NumEntries += JT[i].MBBs.size();

  unsigned EntrySize = MJTI->getEntrySize(*TheJIT->getDataLayout());

  // Just allocate space for all the jump tables now; the actual MBB addresses
  // are filled in by emitJumpTableInfo once they are known.
  JumpTableBase = allocateSpace(NumEntries * EntrySize,
                           MJTI->getEntryAlignment(*TheJIT->getDataLayout()));
  JumpTable = MJTI;
}
void JITEmitter::emitJumpTableInfo(MachineJumpTableInfo *MJTI) {
  if (TheJIT->getJITInfo().hasCustomJumpTables())
    return;

  const std::vector<MachineJumpTableEntry> &JT = MJTI->getJumpTables();
  if (JT.empty() || JumpTableBase == 0) return;

  switch (MJTI->getEntryKind()) {
  case MachineJumpTableInfo::EK_Inline:
    return;
  case MachineJumpTableInfo::EK_BlockAddress: {
    // Each entry is a plain pointer to the target basic block.
    assert(MJTI->getEntrySize(*TheJIT->getDataLayout()) == sizeof(void*) &&
           "Cross JIT'ing?");

    intptr_t *SlotPtr = (intptr_t*)JumpTableBase;
    for (unsigned i = 0, e = JT.size(); i != e; ++i) {
      const std::vector<MachineBasicBlock*> &MBBs = JT[i].MBBs;
      // Store the address of the basic block for this jump table slot.
      for (unsigned mi = 0, me = MBBs.size(); mi != me; ++mi)
        *SlotPtr++ = getMachineBasicBlockAddress(MBBs[mi]);
    }
    break;
  }

  case MachineJumpTableInfo::EK_Custom32:
  case MachineJumpTableInfo::EK_GPRel32BlockAddress:
  case MachineJumpTableInfo::EK_LabelDifference32: {
    assert(MJTI->getEntrySize(*TheJIT->getDataLayout()) == 4 &&
           "Cross JIT'ing?");
    // Each entry is a 32-bit value computed by the target.
    int *SlotPtr = (int*)JumpTableBase;
    for (unsigned i = 0, e = JT.size(); i != e; ++i) {
      const std::vector<MachineBasicBlock*> &MBBs = JT[i].MBBs;
      uintptr_t Base = (uintptr_t)SlotPtr;
      for (unsigned mi = 0, me = MBBs.size(); mi != me; ++mi) {
        uintptr_t MBBAddr = getMachineBasicBlockAddress(MBBs[mi]);
        *SlotPtr++ = TheJIT->getJITInfo().getPICJumpTableEntry(MBBAddr, Base);
      }
    }
    break;
  }
  case MachineJumpTableInfo::EK_GPRel64BlockAddress:
    llvm_unreachable(
           "JT Info emission not implemented for GPRel64BlockAddress yet.");
  }
}
void JITEmitter::startGVStub(const GlobalValue* GV,
                             unsigned StubSize, unsigned Alignment) {
  SavedBufferBegin = BufferBegin;
  SavedBufferEnd = BufferEnd;
  SavedCurBufferPtr = CurBufferPtr;

  BufferBegin = CurBufferPtr = MemMgr->allocateStub(GV, StubSize, Alignment);
  BufferEnd = BufferBegin+StubSize+1;
}
void JITEmitter::startGVStub(void *Buffer, unsigned StubSize) {
  SavedBufferBegin = BufferBegin;
  SavedBufferEnd = BufferEnd;
  SavedCurBufferPtr = CurBufferPtr;

  BufferBegin = CurBufferPtr = (uint8_t *)Buffer;
  BufferEnd = BufferBegin+StubSize+1;
}
void JITEmitter::finishGVStub() {
  assert(CurBufferPtr != BufferEnd && "Stub overflowed allocated space.");
  NumBytes += getCurrentPCOffset();
  BufferBegin = SavedBufferBegin;
  BufferEnd = SavedBufferEnd;
  CurBufferPtr = SavedCurBufferPtr;
}
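// startGVStub/finishGVStub temporarily repoint the emitter's BufferBegin,
// BufferEnd and CurBufferPtr at a freshly allocated stub, so the same
// JITCodeEmitter byte-emission interface can be reused for stubs even in the
// middle of emitting a function; the saved pointers restore the original
// buffer afterwards.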
void *JITEmitter::allocIndirectGV(const GlobalValue *GV,
                                  const uint8_t *Buffer, size_t Size,
                                  unsigned Alignment) {
  uint8_t *IndGV = MemMgr->allocateStub(GV, Size, Alignment);
  memcpy(IndGV, Buffer, Size);
  return IndGV;
}
uintptr_t JITEmitter::getConstantPoolEntryAddress(unsigned ConstantNum) const {
  assert(ConstantNum < ConstantPool->getConstants().size() &&
         "Invalid ConstantPoolIndex!");
  return ConstPoolAddresses[ConstantNum];
}
uintptr_t JITEmitter::getJumpTableEntryAddress(unsigned Index) const {
  const std::vector<MachineJumpTableEntry> &JT = JumpTable->getJumpTables();
  assert(Index < JT.size() && "Invalid jump table index!");

  unsigned EntrySize = JumpTable->getEntrySize(*TheJIT->getDataLayout());

  unsigned Offset = 0;
  for (unsigned i = 0; i < Index; ++i)
    Offset += JT[i].MBBs.size();

  Offset *= EntrySize;

  return (uintptr_t)((char *)JumpTableBase + Offset);
}
void JITEmitter::EmittedFunctionConfig::onDelete(
    JITEmitter *Emitter, const Function *F) {
  Emitter->deallocateMemForFunction(F);
}

void JITEmitter::EmittedFunctionConfig::onRAUW(
    JITEmitter *, const Function*, const Function*) {
  llvm_unreachable("The JIT doesn't know how to handle a"
                   " RAUW on a value it has emitted.");
}
JITCodeEmitter *JIT::createEmitter(JIT &jit, JITMemoryManager *JMM,
                                   TargetMachine &tm) {
  return new JITEmitter(jit, JMM, tm);
}

// getPointerToFunctionOrStub - If the specified function has been
// code-gen'd, return a pointer to the function.  If not, compile it, or use
// a stub to implement lazy compilation if available.
void *JIT::getPointerToFunctionOrStub(Function *F) {
  // If we have already code generated the function, just return the address.
  if (void *Addr = getPointerToGlobalIfAvailable(F))
    return Addr;

  // Get a stub if the target supports it.
  JITEmitter *JE = static_cast<JITEmitter*>(getCodeEmitter());
  return JE->getJITResolver().getLazyFunctionStub(F);
}
void JIT::updateFunctionStub(Function *F) {
  // Get the empty stub we generated earlier.
  JITEmitter *JE = static_cast<JITEmitter*>(getCodeEmitter());
  void *Stub = JE->getJITResolver().getLazyFunctionStub(F);
  void *Addr = getPointerToGlobalIfAvailable(F);
  assert(Addr != Stub && "Function must have non-stub address to be updated.");

  // Tell the target jit info to rewrite the stub in place rather than
  // creating a new one.
  TargetJITInfo::StubLayout layout = getJITInfo().getStubLayout();
  JE->startGVStub(Stub, layout.Size);
  getJITInfo().emitFunctionStub(F, Addr, *getCodeEmitter());
  JE->finishGVStub();
}
void JIT::freeMachineCodeForFunction(Function *F) {
  // Forget the old translation so the function is recompiled if used again.
  updateGlobalMapping(F, 0);

  // Free the memory for the function body and related data.
  static_cast<JITEmitter*>(JCE)->deallocateMemForFunction(F);
}
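// For context, a rough sketch of how a client reaches this code through the
// legacy JIT's public API. EngineBuilder/ExecutionEngine below are the old
// interfaces this file served; the exact calls are an illustrative assumption,
// not something defined in this file:
//
//   Module *M = ...;                             // IR module containing "foo"
//   ExecutionEngine *EE = EngineBuilder(M).create();
//   Function *F = M->getFunction("foo");
//   void *Addr = EE->getPointerToFunction(F);    // drives JITEmitter/JITResolver
//   EE->freeMachineCodeForFunction(F);           // reaches deallocateMemForFunction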