// FRAGMENT (garbled extraction) of AreEquivalentAddressValues(const Value *A,
// const Value *B). The leading decimal prefixes ("35", "42", ...) are the
// original file's line numbers fused into the text by the extractor, and the
// interior lines between them are missing — this span is NOT compilable as-is.
//
// Logic visible in this fragment:
//  - Identical Values are trivially equivalent addresses.
//  - Otherwise, if A is one of the tested instruction kinds (binary op, cast,
//    PHI, GEP) and B is also an Instruction, the two are treated as equivalent
//    when A->isIdenticalToWhenDefined(B) holds.
// NOTE(review): the fall-through / "return false" path is in the missing
// lines — confirm against the full source before relying on this summary.
35 if (A == B)
return true;
42 if (isa<BinaryOperator>(A) || isa<CastInst>(A) ||
43 isa<PHINode>(A) || isa<GetElementPtrInst>(A))
44 if (
// NOTE(review): statement split mid-line by the extractor; reads as
//   if (const Instruction *BI = dyn_cast<Instruction>(B))
const Instruction *BI = dyn_cast<Instruction>(B))
45 if (cast<Instruction>(A)->isIdenticalToWhenDefined(BI))
// FRAGMENT (garbled extraction) of isSafeToLoadUnconditionally(...). Embedded
// decimal prefixes are original line numbers; many interior lines (59-65, 68,
// 72-73, 77-79, 82-83, 85-90, 93-108, 111-114) are missing. Not compilable.
//
// Logic visible in this fragment:
//  - Determine a base object's type and declared alignment: an AllocaInst
//    yields its allocated type/alignment; a GlobalVariable yields its element
//    type/alignment, but only when it cannot be overridden (!mayBeOverridden),
//    since an overridable global's definition is not trustworthy.
//  - If the base type is sized, and the requested Align is no stricter than
//    the base's alignment, the load is safe when it stays within the base
//    allocation (ByteOffset + LoadSize <= alloc size) and the offset respects
//    Align (Align == 0 means "no constraint" here).
//  - Later (orig. 109+), while scanning instructions: a CallInst that
//    mayWriteToMemory (and is not a debug-info intrinsic) blocks the scan;
//    stores are handled in the StoreInst branch below.
// NOTE(review): the TD && BaseAlign == 0 branch body (presumably falling back
// to the DataLayout's preferred alignment) is in the missing lines — confirm.
58 int64_t ByteOffset = 0;
66 unsigned BaseAlign = 0;
67 if (
const AllocaInst *AI = dyn_cast<AllocaInst>(Base)) {
69 BaseType = AI->getAllocatedType();
70 BaseAlign = AI->getAlignment();
71 }
else if (
const GlobalVariable *GV = dyn_cast<GlobalVariable>(Base)) {
74 if (!GV->mayBeOverridden()) {
75 BaseType = GV->getType()->getElementType();
76 BaseAlign = GV->getAlignment();
// (missing lines 77-79)
80 if (BaseType && BaseType->
isSized()) {
81 if (TD && BaseAlign == 0)
84 if (Align <= BaseAlign) {
91 if (ByteOffset + LoadSize <= TD->getTypeAllocSize(BaseType) &&
92 (Align == 0 || (ByteOffset %
Align) == 0))
// (missing lines 93-108: instruction-scan loop head)
109 if (isa<CallInst>(BBI) && BBI->mayWriteToMemory() &&
110 !isa<DbgInfoIntrinsic>(BBI))
115 }
else if (
StoreInst *SI = dyn_cast<StoreInst>(BBI)) {
// FRAGMENT (garbled extraction) of FindAvailableLoadedValue(Value *Ptr,
// BasicBlock *ScanBB, BasicBlock::iterator &ScanFrom, unsigned MaxInstsToScan,
// ...). Embedded decimal prefixes are original line numbers; interior lines
// (142-143, 145-146, 148, 150-152, 154-156, 158-163, 165-175, 177-187) are
// missing. Not compilable as-is.
//
// Logic visible in this fragment:
//  - MaxInstsToScan == 0 is normalized to ~0U, i.e. "scan without limit".
//  - The loaded access type is taken from Ptr's pointee type.
//  - Instructions are scanned backwards from ScanFrom toward the start of
//    ScanBB; debug-info intrinsics are skipped (orig. 157), and the scan gives
//    up (returns 0) once the instruction budget is exhausted.
//  - For a StoreInst to a *different* pointer: if both Ptr and the store's
//    destination (operand 1) are identified objects (alloca or global
//    variable), the store provably cannot alias Ptr and the scan continues.
// NOTE(review): the handling of loads/stores that DO match Ptr (returning the
// available value) is in the missing lines — confirm against the full source.
141 unsigned MaxInstsToScan,
144 if (MaxInstsToScan == 0) MaxInstsToScan = ~0U;
147 uint64_t AccessSize = 0;
149 Type *AccessTy = cast<PointerType>(Ptr->
getType())->getElementType();
153 while (ScanFrom != ScanBB->
begin()) {
157 if (isa<DbgInfoIntrinsic>(Inst))
164 if (MaxInstsToScan-- == 0)
return 0;
176 if (
StoreInst *SI = dyn_cast<StoreInst>(Inst)) {
188 if ((isa<AllocaInst>(Ptr) || isa<GlobalVariable>(Ptr)) &&
189 (isa<AllocaInst>(SI->getOperand(1)) ||
190 isa<GlobalVariable>(SI->getOperand(1))))
ModRefResult getModRefInfo(const Instruction *I, const Location &Loc)
unsigned getPrefTypeAlignment(Type *Ty) const
Value * GetPointerBaseWithConstantOffset(Value *Ptr, int64_t &Offset, const DataLayout *TD)
MDNode - a tuple of other values.
bool isSafeToLoadUnconditionally(Value *V, Instruction *ScanFrom, unsigned Align, const DataLayout *TD=0)
LoopInfoBase< BlockT, LoopT > * LI
Value * getOperand(unsigned i) const LLVM_READONLY
getOperand - Return specified operand.
uint64_t getTypeStoreSize(Type *Ty)
Type * getElementType() const
LLVM Basic Block Representation.
bool mayWriteToMemory() const
Value * FindAvailableLoadedValue(Value *Ptr, BasicBlock *ScanBB, BasicBlock::iterator &ScanFrom, unsigned MaxInstsToScan=6, AliasAnalysis *AA=0, MDNode **TBAATag=0)
static bool AreEquivalentAddressValues(const Value *A, const Value *B)
static cl::opt< AlignMode > Align(cl::desc("Load/store alignment support"), cl::Hidden, cl::init(DefaultAlign), cl::values(clEnumValN(DefaultAlign,"arm-default-align","Generate unaligned accesses only on hardware/OS ""combinations that are known to support them"), clEnumValN(StrictAlign,"arm-strict-align","Disallow all unaligned memory accesses"), clEnumValN(NoStrictAlign,"arm-no-strict-align","Allow unaligned memory accesses"), clEnumValEnd))
uint64_t getTypeStoreSize(Type *Ty) const
LLVM Value Representation.
const BasicBlock * getParent() const
INITIALIZE_PASS(GlobalMerge,"global-merge","Global Merge", false, false) bool GlobalMerge const DataLayout * TD