// Debug tag used by DEBUG()/dbgs() output; enable with -debug-only=tsan.
// (Fix: stray scraped line-number artifact removed from the directive.)
#define DEBUG_TYPE "tsan"
52 "tsan-instrument-memory-accesses",
cl::init(
true),
55 "tsan-instrument-func-entry-exit",
cl::init(
true),
58 "tsan-instrument-atomics",
cl::init(
true),
61 "tsan-instrument-memintrinsics",
cl::init(
true),
64 STATISTIC(NumInstrumentedReads,
"Number of instrumented reads");
65 STATISTIC(NumInstrumentedWrites,
"Number of instrumented writes");
67 "Number of reads ignored due to following writes");
68 STATISTIC(NumAccessesWithBadSize,
"Number of accesses with bad size");
69 STATISTIC(NumInstrumentedVtableWrites,
"Number of vtable ptr writes");
70 STATISTIC(NumInstrumentedVtableReads,
"Number of vtable ptr reads");
71 STATISTIC(NumOmittedReadsFromConstantGlobals,
72 "Number of reads from constant globals");
73 STATISTIC(NumOmittedReadsFromVtable,
"Number of vtable reads");
84 const char *getPassName()
const;
86 bool doInitialization(
Module &M);
90 void initializeCallbacks(
Module &M);
96 bool addrPointsToConstantData(
Value *Addr);
97 int getMemoryAccessFuncIndex(
Value *Addr);
119 Function *MemmoveFn, *MemcpyFn, *MemsetFn;
125 "ThreadSanitizer: detects data races.",
128 const
char *ThreadSanitizer::getPassName()
const {
129 return "ThreadSanitizer";
133 return new ThreadSanitizer(BlacklistFile);
137 if (
Function *
F = dyn_cast<Function>(FuncOrBitcast))
139 FuncOrBitcast->
dump();
143 void ThreadSanitizer::initializeCallbacks(
Module &M) {
147 "__tsan_func_entry", IRB.getVoidTy(), IRB.getInt8PtrTy(), NULL));
149 "__tsan_func_exit", IRB.getVoidTy(), NULL));
150 OrdTy = IRB.getInt32Ty();
152 const size_t ByteSize = 1 << i;
153 const size_t BitSize = ByteSize * 8;
156 ReadName, IRB.getVoidTy(), IRB.getInt8PtrTy(), NULL));
160 WriteName, IRB.getVoidTy(), IRB.getInt8PtrTy(), NULL));
167 AtomicLoadName, Ty, PtrTy, OrdTy, NULL));
172 AtomicStoreName, IRB.getVoidTy(), PtrTy, Ty, OrdTy,
177 TsanAtomicRMW[op][i] = NULL;
178 const char *NamePart = NULL;
180 NamePart =
"_exchange";
182 NamePart =
"_fetch_add";
184 NamePart =
"_fetch_sub";
186 NamePart =
"_fetch_and";
188 NamePart =
"_fetch_or";
190 NamePart =
"_fetch_xor";
192 NamePart =
"_fetch_nand";
197 RMWName, Ty, PtrTy, Ty, OrdTy, NULL));
201 "_compare_exchange_val");
203 AtomicCASName, Ty, PtrTy, Ty, Ty, OrdTy, OrdTy, NULL));
206 "__tsan_vptr_update", IRB.getVoidTy(), IRB.getInt8PtrTy(),
207 IRB.getInt8PtrTy(), NULL));
209 "__tsan_vptr_read", IRB.getVoidTy(), IRB.getInt8PtrTy(), NULL));
211 "__tsan_atomic_thread_fence", IRB.getVoidTy(), OrdTy, NULL));
213 "__tsan_atomic_signal_fence", IRB.getVoidTy(), OrdTy, NULL));
216 "memmove", IRB.getInt8PtrTy(), IRB.getInt8PtrTy(),
217 IRB.getInt8PtrTy(), IntptrTy, NULL));
219 "memcpy", IRB.getInt8PtrTy(), IRB.getInt8PtrTy(), IRB.getInt8PtrTy(),
222 "memset", IRB.getInt8PtrTy(), IRB.getInt8PtrTy(), IRB.getInt32Ty(),
226 bool ThreadSanitizer::doInitialization(
Module &M) {
227 TD = getAnalysisIfAvailable<DataLayout>();
236 IRB.getVoidTy(), NULL);
244 return Tag->isTBAAVtableAccess();
248 bool ThreadSanitizer::addrPointsToConstantData(
Value *Addr) {
251 Addr = GEP->getPointerOperand();
254 if (GV->isConstant()) {
256 NumOmittedReadsFromConstantGlobals++;
259 }
else if (
LoadInst *L = dyn_cast<LoadInst>(Addr)) {
262 NumOmittedReadsFromVtable++;
280 void ThreadSanitizer::chooseInstructionsToInstrument(
286 E = Local.
rend(); It != E; ++It) {
293 if (WriteTargets.
count(Addr)) {
295 NumOmittedReadsBeforeWrite++;
298 if (addrPointsToConstantData(Addr)) {
311 if (
StoreInst *SI = dyn_cast<StoreInst>(I))
312 return SI->isAtomic() && SI->getSynchScope() ==
CrossThread;
313 if (isa<AtomicRMWInst>(I))
315 if (isa<AtomicCmpXchgInst>(I))
317 if (isa<FenceInst>(I))
322 bool ThreadSanitizer::runOnFunction(
Function &
F) {
323 if (!
TD)
return false;
324 if (
BL->isIn(F))
return false;
332 bool HasCalls =
false;
342 else if (isa<LoadInst>(BI) || isa<StoreInst>(BI))
344 else if (isa<ReturnInst>(BI))
345 RetVec.push_back(BI);
346 else if (isa<CallInst>(BI) || isa<InvokeInst>(BI)) {
347 if (isa<MemIntrinsic>(BI))
350 chooseInstructionsToInstrument(LocalLoadsAndStores, AllLoadsAndStores);
353 chooseInstructionsToInstrument(LocalLoadsAndStores, AllLoadsAndStores);
362 for (
size_t i = 0, n = AllLoadsAndStores.
size(); i < n; ++i) {
363 Res |= instrumentLoadOrStore(AllLoadsAndStores[i]);
368 for (
size_t i = 0, n = AtomicAccesses.
size(); i < n; ++i) {
369 Res |= instrumentAtomic(AtomicAccesses[i]);
373 for (
size_t i = 0, n = MemIntrinCalls.
size(); i < n; ++i) {
374 Res |= instrumentMemIntrinsic(MemIntrinCalls[i]);
380 Value *ReturnAddress = IRB.CreateCall(
383 IRB.CreateCall(TsanFuncEntry, ReturnAddress);
384 for (
size_t i = 0, n = RetVec.size(); i < n; ++i) {
386 IRBRet.CreateCall(TsanFuncExit);
393 bool ThreadSanitizer::instrumentLoadOrStore(
Instruction *I) {
395 bool IsWrite = isa<StoreInst>(*I);
396 Value *Addr = IsWrite
399 int Idx = getMemoryAccessFuncIndex(Addr);
403 DEBUG(
dbgs() <<
" VPTR : " << *I <<
"\n");
404 Value *StoredValue = cast<StoreInst>(
I)->getValueOperand();
406 if (isa<IntegerType>(StoredValue->
getType()))
407 StoredValue = IRB.CreateIntToPtr(StoredValue, IRB.getInt8PtrTy());
409 IRB.CreateCall2(TsanVptrUpdate,
410 IRB.CreatePointerCast(Addr, IRB.getInt8PtrTy()),
411 IRB.CreatePointerCast(StoredValue, IRB.getInt8PtrTy()));
412 NumInstrumentedVtableWrites++;
416 IRB.CreateCall(TsanVptrLoad,
417 IRB.CreatePointerCast(Addr, IRB.getInt8PtrTy()));
418 NumInstrumentedVtableReads++;
421 Value *OnAccessFunc = IsWrite ? TsanWrite[Idx] : TsanRead[Idx];
422 IRB.CreateCall(OnAccessFunc, IRB.CreatePointerCast(Addr, IRB.getInt8PtrTy()));
423 if (IsWrite) NumInstrumentedWrites++;
424 else NumInstrumentedReads++;
466 bool ThreadSanitizer::instrumentMemIntrinsic(
Instruction *I) {
468 if (
MemSetInst *M = dyn_cast<MemSetInst>(I)) {
469 IRB.CreateCall3(MemsetFn,
470 IRB.CreatePointerCast(M->getArgOperand(0), IRB.getInt8PtrTy()),
471 IRB.CreateIntCast(M->getArgOperand(1), IRB.getInt32Ty(),
false),
472 IRB.CreateIntCast(M->getArgOperand(2), IntptrTy,
false));
475 IRB.CreateCall3(isa<MemCpyInst>(M) ? MemcpyFn : MemmoveFn,
476 IRB.CreatePointerCast(M->getArgOperand(0), IRB.getInt8PtrTy()),
477 IRB.CreatePointerCast(M->getArgOperand(1), IRB.getInt8PtrTy()),
478 IRB.CreateIntCast(M->getArgOperand(2), IntptrTy,
false));
492 bool ThreadSanitizer::instrumentAtomic(
Instruction *I) {
495 Value *Addr =
LI->getPointerOperand();
496 int Idx = getMemoryAccessFuncIndex(Addr);
499 const size_t ByteSize = 1 << Idx;
500 const size_t BitSize = ByteSize * 8;
503 Value *Args[] = {IRB.CreatePointerCast(Addr, PtrTy),
509 }
else if (
StoreInst *SI = dyn_cast<StoreInst>(I)) {
510 Value *Addr = SI->getPointerOperand();
511 int Idx = getMemoryAccessFuncIndex(Addr);
514 const size_t ByteSize = 1 << Idx;
515 const size_t BitSize = ByteSize * 8;
518 Value *Args[] = {IRB.CreatePointerCast(Addr, PtrTy),
519 IRB.CreateIntCast(SI->getValueOperand(), Ty,
false),
524 }
else if (
AtomicRMWInst *RMWI = dyn_cast<AtomicRMWInst>(I)) {
525 Value *Addr = RMWI->getPointerOperand();
526 int Idx = getMemoryAccessFuncIndex(Addr);
529 Function *F = TsanAtomicRMW[RMWI->getOperation()][Idx];
532 const size_t ByteSize = 1 << Idx;
533 const size_t BitSize = ByteSize * 8;
536 Value *Args[] = {IRB.CreatePointerCast(Addr, PtrTy),
537 IRB.CreateIntCast(RMWI->getValOperand(), Ty,
false),
542 Value *Addr = CASI->getPointerOperand();
543 int Idx = getMemoryAccessFuncIndex(Addr);
546 const size_t ByteSize = 1 << Idx;
547 const size_t BitSize = ByteSize * 8;
550 Value *Args[] = {IRB.CreatePointerCast(Addr, PtrTy),
551 IRB.CreateIntCast(CASI->getCompareOperand(), Ty,
false),
552 IRB.CreateIntCast(CASI->getNewValOperand(), Ty,
false),
557 }
else if (
FenceInst *FI = dyn_cast<FenceInst>(I)) {
560 TsanAtomicSignalFence : TsanAtomicThreadFence;
567 int ThreadSanitizer::getMemoryAccessFuncIndex(
Value *Addr) {
569 Type *OrigTy = cast<PointerType>(OrigPtrTy)->getElementType();
571 uint32_t TypeSize =
TD->getTypeStoreSizeInBits(OrigTy);
572 if (TypeSize != 8 && TypeSize != 16 &&
573 TypeSize != 32 && TypeSize != 64 && TypeSize != 128) {
574 NumAccessesWithBadSize++;
579 assert(Idx < kNumberOfAccessSizes);
void push_back(const T &Elt)
void ReplaceInstWithInst(BasicBlock::InstListType &BIL, BasicBlock::iterator &BI, Instruction *I)
The main container class for the LLVM Intermediate Representation.
void appendToGlobalCtors(Module &M, Function *F, int Priority)
static SpecialCaseList * createOrDie(const StringRef Path)
MDNode - a tuple of other values.
enable_if_c<!is_simple_type< Y >::value, typename cast_retty< X, const Y >::ret_type >::type cast(const Y &Val)
LoopInfoBase< BlockT, LoopT > * LI
LLVM_ATTRIBUTE_NORETURN void report_fatal_error(const char *reason, bool gen_crash_diag=true)
static cl::opt< bool > ClInstrumentAtomics("tsan-instrument-atomics", cl::init(true), cl::desc("Instrument atomics"), cl::Hidden)
void dump() const
dump - Support for debugging, callable in GDB: V->dump()
static Value * getPointerOperand(Instruction &Inst)
Instruction * getFirstNonPHI()
Returns a pointer to the first instruction in this block that is not a PHINode instruction.
This provides a uniform API for creating instructions and inserting them into a basic block: either a...
ID
LLVM Calling Convention Representation.
enable_if_c< std::numeric_limits< T >::is_integer &&!std::numeric_limits< T >::is_signed, std::size_t >::type countTrailingZeros(T Val, ZeroBehavior ZB=ZB_Width)
Count number of 0's from the least significant bit to the most stopping at the first 1...
Function * getDeclaration(Module *M, ID id, ArrayRef< Type * > Tys=None)
static const size_t kNumberOfAccessSizes
initializer< Ty > init(const Ty &Val)
Constant * getOrInsertFunction(StringRef Name, FunctionType *T, AttributeSet AttributeList)
LLVM Basic Block Representation.
LLVM Constant Representation.
static ConstantInt * createFailOrdering(IRBuilder<> *IRB, AtomicOrdering ord)
Value * getPointerOperand()
Integer representation type.
STATISTIC(NumInstrumentedReads,"Number of instrumented reads")
static std::string itostr(int64_t X)
static cl::opt< bool > ClInstrumentFuncEntryExit("tsan-instrument-func-entry-exit", cl::init(true), cl::desc("Instrument function entry and exit"), cl::Hidden)
static bool isAtomic(Instruction *I)
PointerType * getPointerTo(unsigned AddrSpace=0)
IntegerType * getIntPtrTy(const DataLayout *DL, unsigned AddrSpace=0)
Fetch the type representing a pointer to an integer value.
#define INITIALIZE_PASS(passName, arg, name, cfg, analysis)
static CallInst * Create(Value *Func, ArrayRef< Value * > Args, const Twine &NameStr="", Instruction *InsertBefore=0)
Class for constant integers.
static cl::opt< std::string > ClBlacklistFile("tsan-blacklist", cl::desc("Blacklist file"), cl::Hidden)
MDNode * getMetadata(unsigned KindID) const
ConstantInt * getInt32(uint32_t C)
Get a constant 32-bit value.
static IntegerType * getIntNTy(LLVMContext &C, unsigned N)
static ConstantInt * createOrdering(IRBuilder<> *IRB, AtomicOrdering ord)
const BasicBlock & getEntryBlock() const
raw_ostream & dbgs()
dbgs - Return a circular-buffered debug stream.
static Function * checkInterfaceFunction(Constant *FuncOrBitcast)
bool count(const T &V) const
count - Return true if the element is in the set.
bool hasFnAttribute(Attribute::AttrKind Kind) const
Return true if the function has the attribute.
static cl::opt< bool > ClInstrumentMemoryAccesses("tsan-instrument-memory-accesses", cl::init(true), cl::desc("Instrument memory accesses"), cl::Hidden)
static cl::opt< bool > ClInstrumentMemIntrinsics("tsan-instrument-memintrinsics", cl::init(true), cl::desc("Instrument memintrinsics (memset/memcpy/memmove)"), cl::Hidden)
FunctionPass * createThreadSanitizerPass(StringRef BlacklistFile=StringRef())
reverse_iterator rbegin()
LLVM Value Representation.
INITIALIZE_PASS(GlobalMerge,"global-merge","Global Merge", false, false) bool GlobalMerge const DataLayout * TD
static bool isVtableAccess(Instruction *I)
LLVMContext & getContext() const