Introduce and use a new MemDepResult class to hold the results of a memdep
query. This makes it crystal clear which cases can escape from MemDep and
therefore have to be handled by the clients. It also gives the clients a
nice, simplified interface that is easy to poke at.

This patch also makes DepResultTy and MemoryDependenceAnalysis::DepType
private, yay.


git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@60231 91177308-0d34-0410-b5e6-96231b3b80d8
Chris Lattner 2008-11-29 02:29:27 +00:00
parent 39f372e23e
commit 4c72400625
5 changed files with 161 additions and 103 deletions
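For orientation before the diff: a minimal sketch, based only on the client changes below, of how a local dependency query reads before and after this patch. Here MD is assumed to be a MemoryDependenceAnalysis reference and Inst the instruction being queried.

    // Old idiom: clients matched on the raw DepResultTy tag and pointer.
    //   MemoryDependenceAnalysis::DepResultTy dep = MD.getDependency(Inst);
    //   if (dep.getInt() == MemoryDependenceAnalysis::NonLocal) { ... }
    //   Instruction *DepInst = dep.getPointer();

    // New idiom: clients receive a MemDepResult and use its query methods.
    MemDepResult dep = MD.getDependency(Inst);
    if (dep.isNonLocal()) {
      // Nothing in this block resolves the query; a non-local walk is needed.
    } else if (dep.isNone()) {
      // No dependency at all, e.g. we reached the allocation site or entry block.
    } else if (Instruction *DepInst = dep.getInst()) {
      // Normal dependency: DepInst is the instruction the query depends on.
    }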


@ -24,13 +24,62 @@ namespace llvm {
class FunctionPass;
class Instruction;
class CallSite;
/// MemDepResult - A memory dependence query can return one of three different
/// answers:
/// Normal : The query is dependent on a specific instruction.
/// NonLocal: The query does not depend on anything inside this block, but
/// we haven't scanned beyond the block to find out what.
/// None : The query does not depend on anything: we found the entry
/// block or the allocation site of the memory.
class MemDepResult {
enum DepType {
Invalid = 0, Normal, NonLocal, None
};
typedef PointerIntPair<Instruction*, 2, DepType> PairTy;
PairTy Value;
explicit MemDepResult(PairTy V) : Value(V) {}
public:
MemDepResult() : Value(0, Invalid) {}
/// get methods: These are static ctor methods for creating various
/// MemDepResult kinds.
static MemDepResult get(Instruction *Inst) {
return MemDepResult(PairTy(Inst, Normal));
}
static MemDepResult getNonLocal() {
return MemDepResult(PairTy(0, NonLocal));
}
static MemDepResult getNone() {
return MemDepResult(PairTy(0, None));
}
/// isNormal - Return true if this MemDepResult represents a query that is
/// a normal instruction dependency.
bool isNormal() const { return Value.getInt() == Normal; }
/// isNonLocal - Return true if this MemDepResult represents a query that
/// is transparent to the start of the block, but where a non-local query
/// hasn't been done.
bool isNonLocal() const { return Value.getInt() == NonLocal; }
/// isNone - Return true if this MemDepResult represents a query that
/// doesn't depend on any instruction.
bool isNone() const { return Value.getInt() == None; }
/// getInst() - If this is a normal dependency, return the instruction that
/// is depended on. Otherwise, return null.
Instruction *getInst() const { return isNormal() ? Value.getPointer() : 0; }
bool operator==(const MemDepResult &M) { return M.Value == Value; }
bool operator!=(const MemDepResult &M) { return M.Value != Value; }
};
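As a quick illustration (not part of the patch) of how the three states above are produced, here is a hypothetical helper using only the factory methods declared in this class:

    // Hypothetical helper: map the outcome of a block scan to a MemDepResult.
    static MemDepResult classifyScan(Instruction *DefInst, bool ReachedBlockTop) {
      if (DefInst)
        return MemDepResult::get(DefInst);    // Normal: depends on DefInst.
      if (ReachedBlockTop)
        return MemDepResult::getNonLocal();   // Transparent to this block.
      return MemDepResult::getNone();         // Hit the allocation or entry block.
    }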
/// MemoryDependenceAnalysis - This is an analysis that determines, for a
/// given memory operation, what preceding memory operations it depends on.
/// It builds on alias analysis information, and tries to provide a lazy,
/// caching interface to a common kind of alias information query.
class MemoryDependenceAnalysis : public FunctionPass {
public:
/// DepType - This enum is used to indicate what flavor of dependence this
/// is. If the type is Normal, there is an associated instruction pointer.
enum DepType {
@ -54,7 +103,7 @@ namespace llvm {
Dirty
};
typedef PointerIntPair<Instruction*, 2, DepType> DepResultTy;
private:
// A map from instructions to their dependency, with a boolean
// flags for whether this mapping is confirmed or not.
typedef DenseMap<Instruction*,
@ -62,6 +111,7 @@ namespace llvm {
LocalDepMapType LocalDeps;
// A map from instructions to their non-local dependencies.
// FIXME: DENSEMAP of DENSEMAP not a great idea.
typedef DenseMap<Instruction*,
DenseMap<BasicBlock*, DepResultTy> > nonLocalDepMapType;
nonLocalDepMapType depGraphNonLocal;
@ -98,14 +148,14 @@ namespace llvm {
/// getDependency - Return the instruction on which a memory operation
/// depends, starting with start.
DepResultTy getDependency(Instruction *query, Instruction *start = 0,
BasicBlock *block = 0);
MemDepResult getDependency(Instruction *query, Instruction *start = 0,
BasicBlock *block = 0);
/// getNonLocalDependency - Fills the passed-in map with the non-local
/// dependencies of the queries. The map will contain NonLocal for
/// blocks between the query and its dependencies.
void getNonLocalDependency(Instruction* query,
DenseMap<BasicBlock*, DepResultTy> &resp);
DenseMap<BasicBlock*, MemDepResult> &resp);
/// removeInstruction - Remove an instruction from the dependence analysis,
/// updating the dependence of instructions that previously depended on it.
@ -117,14 +167,33 @@ namespace llvm {
void dropInstruction(Instruction *InstToDrop);
private:
DepResultTy ConvFromResult(MemDepResult R) {
if (Instruction *I = R.getInst())
return DepResultTy(I, Normal);
if (R.isNonLocal())
return DepResultTy(0, NonLocal);
assert(R.isNone() && "Unknown MemDepResult!");
return DepResultTy(0, None);
}
MemDepResult ConvToResult(DepResultTy R) {
if (R.getInt() == Normal)
return MemDepResult::get(R.getPointer());
if (R.getInt() == NonLocal)
return MemDepResult::getNonLocal();
assert(R.getInt() == None && "Unknown MemDepResult!");
return MemDepResult::getNone();
}
/// verifyRemoved - Verify that the specified instruction does not occur
/// in our internal data structures.
void verifyRemoved(Instruction *Inst) const;
DepResultTy getCallSiteDependency(CallSite C, Instruction* start,
BasicBlock* block);
MemDepResult getCallSiteDependency(CallSite C, Instruction* start,
BasicBlock* block);
void nonLocalHelper(Instruction* query, BasicBlock* block,
DenseMap<BasicBlock*, DepResultTy>& resp);
DenseMap<BasicBlock*, DepResultTy> &resp);
};
} // End llvm namespace
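To round out the header: a rough sketch of a non-local query against the updated signature. The loop mirrors the GVN changes later in this diff; the variable names are illustrative.

    DenseMap<BasicBlock*, MemDepResult> Deps;
    MD.getNonLocalDependency(Query, Deps);
    for (DenseMap<BasicBlock*, MemDepResult>::iterator I = Deps.begin(),
         E = Deps.end(); I != E; ++I) {
      if (I->second.isNonLocal())
        continue;                       // Block is transparent to the query.
      if (I->second.isNone())
        break;                          // No dependency anywhere; give up.
      Instruction *DepInst = I->second.getInst();
      // ... handle the concrete dependency DepInst found in block I->first ...
    }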


@ -87,8 +87,7 @@ void MemoryDependenceAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
/// getCallSiteDependency - Private helper for finding the local dependencies
/// of a call site.
MemoryDependenceAnalysis::DepResultTy
MemoryDependenceAnalysis::
MemDepResult MemoryDependenceAnalysis::
getCallSiteDependency(CallSite C, Instruction *start, BasicBlock *block) {
std::pair<DepResultTy, bool> &cachedResult = LocalDeps[C.getInstruction()];
AliasAnalysis& AA = getAnalysis<AliasAnalysis>();
@ -140,7 +139,7 @@ getCallSiteDependency(CallSite C, Instruction *start, BasicBlock *block) {
cachedResult.second = true;
reverseDep[DepResultTy(QI, Normal)].insert(C.getInstruction());
}
return DepResultTy(QI, Normal);
return MemDepResult::get(QI);
} else {
continue;
}
@ -153,7 +152,7 @@ getCallSiteDependency(CallSite C, Instruction *start, BasicBlock *block) {
cachedResult.second = true;
reverseDep[DepResultTy(QI, Normal)].insert(C.getInstruction());
}
return DepResultTy(QI, Normal);
return MemDepResult::get(QI);
}
}
@ -161,7 +160,7 @@ getCallSiteDependency(CallSite C, Instruction *start, BasicBlock *block) {
cachedResult.first = DepResultTy(0, NonLocal);
cachedResult.second = true;
reverseDep[DepResultTy(0, NonLocal)].insert(C.getInstruction());
return DepResultTy(0, NonLocal);
return MemDepResult::getNonLocal();
}
/// nonLocalHelper - Private helper used to calculate non-local dependencies
@ -199,11 +198,10 @@ void MemoryDependenceAnalysis::nonLocalHelper(Instruction* query,
if (BB != block) {
visited.insert(BB);
DepResultTy localDep = getDependency(query, 0, BB);
if (localDep.getInt() != NonLocal) {
resp.insert(std::make_pair(BB, localDep));
MemDepResult localDep = getDependency(query, 0, BB);
if (!localDep.isNonLocal()) {
resp.insert(std::make_pair(BB, ConvFromResult(localDep)));
stack.pop_back();
continue;
}
// If we re-encounter the starting block, we still need to search it
@ -212,12 +210,11 @@ void MemoryDependenceAnalysis::nonLocalHelper(Instruction* query,
} else if (BB == block) {
visited.insert(BB);
DepResultTy localDep = getDependency(query, 0, BB);
if (localDep != DepResultTy(query, Normal))
resp.insert(std::make_pair(BB, localDep));
MemDepResult localDep = getDependency(query, 0, BB);
if (localDep.getInst() != query)
resp.insert(std::make_pair(BB, ConvFromResult(localDep)));
stack.pop_back();
continue;
}
@ -257,7 +254,7 @@ void MemoryDependenceAnalysis::nonLocalHelper(Instruction* query,
/// dependencies of the queries. The map will contain NonLocal for
/// blocks between the query and its dependencies.
void MemoryDependenceAnalysis::getNonLocalDependency(Instruction* query,
DenseMap<BasicBlock*, DepResultTy> &resp) {
DenseMap<BasicBlock*, MemDepResult> &resp) {
if (depGraphNonLocal.count(query)) {
DenseMap<BasicBlock*, DepResultTy> &cached = depGraphNonLocal[query];
NumCacheNonlocal++;
@ -270,44 +267,46 @@ void MemoryDependenceAnalysis::getNonLocalDependency(Instruction* query,
for (SmallVector<BasicBlock*, 4>::iterator I = dirtied.begin(),
E = dirtied.end(); I != E; ++I) {
DepResultTy localDep = getDependency(query, 0, *I);
if (localDep.getInt() != NonLocal)
cached[*I] = localDep;
MemDepResult localDep = getDependency(query, 0, *I);
if (!localDep.isNonLocal())
cached[*I] = ConvFromResult(localDep);
else {
cached.erase(*I);
nonLocalHelper(query, *I, cached);
}
}
resp = cached;
// Update the reverse non-local dependency cache
for (DenseMap<BasicBlock*, DepResultTy>::iterator I = resp.begin(),
E = resp.end(); I != E; ++I)
// Update the reverse non-local dependency cache.
for (DenseMap<BasicBlock*, DepResultTy>::iterator I = cached.begin(),
E = cached.end(); I != E; ++I) {
reverseDepNonLocal[I->second].insert(query);
resp[I->first] = ConvToResult(I->second);
}
return;
} else
NumUncacheNonlocal++;
}
NumUncacheNonlocal++;
// If not, go ahead and search for non-local deps.
nonLocalHelper(query, query->getParent(), resp);
DenseMap<BasicBlock*, DepResultTy> &cached = depGraphNonLocal[query];
nonLocalHelper(query, query->getParent(), cached);
// Update the non-local dependency cache
for (DenseMap<BasicBlock*, DepResultTy>::iterator I = resp.begin(),
E = resp.end(); I != E; ++I) {
depGraphNonLocal[query].insert(*I);
for (DenseMap<BasicBlock*, DepResultTy>::iterator I = cached.begin(),
E = cached.end(); I != E; ++I) {
// FIXME: Merge with the code above!
reverseDepNonLocal[I->second].insert(query);
resp[I->first] = ConvToResult(I->second);
}
}
/// getDependency - Return the instruction on which a memory operation
/// depends. The local parameter indicates if the query should only
/// evaluate dependencies within the same basic block.
MemoryDependenceAnalysis::DepResultTy
MemoryDependenceAnalysis::getDependency(Instruction *query,
Instruction *start,
BasicBlock *block) {
MemDepResult MemoryDependenceAnalysis::getDependency(Instruction *query,
Instruction *start,
BasicBlock *block) {
// Start looking for dependencies with the queried inst
BasicBlock::iterator QI = query;
@ -316,7 +315,7 @@ MemoryDependenceAnalysis::getDependency(Instruction *query,
// If we have a _confirmed_ cached entry, return it
if (!block && !start) {
if (cachedResult.second)
return cachedResult.first;
return ConvToResult(cachedResult.first);
else if (cachedResult.first.getInt() == Normal &&
cachedResult.first.getPointer())
// If we have an unconfirmed cached entry, we can start our search from
@ -355,9 +354,9 @@ MemoryDependenceAnalysis::getDependency(Instruction *query,
} else if (CallSite::get(query).getInstruction() != 0)
return getCallSiteDependency(CallSite::get(query), start, block);
else if (isa<AllocationInst>(query))
return DepResultTy(0, None);
return MemDepResult::getNone();
else
return DepResultTy(0, None);
return MemDepResult::getNone();
BasicBlock::iterator blockBegin = block ? block->begin()
: query->getParent()->begin();
@ -378,7 +377,7 @@ MemoryDependenceAnalysis::getDependency(Instruction *query,
reverseDep[DepResultTy(S, Normal)].insert(query);
}
return DepResultTy(S, Normal);
return MemDepResult::get(S);
}
pointer = S->getPointerOperand();
@ -392,7 +391,7 @@ MemoryDependenceAnalysis::getDependency(Instruction *query,
reverseDep[DepResultTy(L, Normal)].insert(query);
}
return DepResultTy(L, Normal);
return MemDepResult::get(L);
}
pointer = L->getPointerOperand();
@ -427,7 +426,7 @@ MemoryDependenceAnalysis::getDependency(Instruction *query,
cachedResult.second = true;
reverseDep[DepResultTy(QI, Normal)].insert(query);
}
return DepResultTy(QI, Normal);
return MemDepResult::get(QI);
} else {
continue;
}
@ -450,7 +449,7 @@ MemoryDependenceAnalysis::getDependency(Instruction *query,
reverseDep[DepResultTy(QI, Normal)].insert(query);
}
return DepResultTy(QI, Normal);
return MemDepResult::get(QI);
}
}
}
@ -462,7 +461,7 @@ MemoryDependenceAnalysis::getDependency(Instruction *query,
reverseDep[DepResultTy(0, NonLocal)].insert(query);
}
return DepResultTy(0, NonLocal);
return MemDepResult::getNonLocal();
}
/// dropInstruction - Remove an instruction from the analysis, making


@ -47,10 +47,8 @@ namespace {
return Changed;
}
typedef MemoryDependenceAnalysis::DepResultTy DepResultTy;
bool runOnBasicBlock(BasicBlock &BB);
bool handleFreeWithNonTrivialDependency(FreeInst *F, DepResultTy Dep);
bool handleFreeWithNonTrivialDependency(FreeInst *F, MemDepResult Dep);
bool handleEndBlock(BasicBlock &BB);
bool RemoveUndeadPointers(Value* pointer, uint64_t killPointerSize,
BasicBlock::iterator& BBI,
@ -110,16 +108,15 @@ bool DSE::runOnBasicBlock(BasicBlock &BB) {
// ... to a pointer that has been stored to before...
if (last) {
DepResultTy dep = MD.getDependency(Inst);
MemDepResult dep = MD.getDependency(Inst);
bool deletedStore = false;
// ... and no other memory dependencies are between them....
while (dep.getInt() == MemoryDependenceAnalysis::Normal &&
isa<StoreInst>(dep.getPointer())) {
if (dep.getPointer() != last ||
TD.getTypeStoreSize(last->getOperand(0)->getType()) >
TD.getTypeStoreSize(Inst->getOperand(0)->getType())) {
dep = MD.getDependency(Inst, dep.getPointer());
while (StoreInst *DepStore = dyn_cast_or_null<StoreInst>(dep.getInst())) {
if (DepStore != last ||
TD.getTypeStoreSize(last->getOperand(0)->getType()) >
TD.getTypeStoreSize(Inst->getOperand(0)->getType())) {
dep = MD.getDependency(Inst, DepStore);
continue;
}
@ -152,14 +149,12 @@ bool DSE::runOnBasicBlock(BasicBlock &BB) {
// loaded from, then the store can be removed;
if (LoadInst* L = dyn_cast<LoadInst>(S->getOperand(0))) {
// FIXME: Don't do dep query if Parents don't match and other stuff!
DepResultTy dep = MD.getDependency(S);
MemDepResult dep = MD.getDependency(S);
DominatorTree& DT = getAnalysis<DominatorTree>();
if (!S->isVolatile() && S->getParent() == L->getParent() &&
S->getPointerOperand() == L->getPointerOperand() &&
(dep.getInt() == MemoryDependenceAnalysis::None ||
dep.getInt() == MemoryDependenceAnalysis::NonLocal ||
DT.dominates(dep.getPointer(), L))) {
(!dep.isNormal() || DT.dominates(dep.getInst(), L))) {
DeleteDeadInstruction(S);
if (!isa<TerminatorInst>(BB.begin()))
@ -185,15 +180,11 @@ bool DSE::runOnBasicBlock(BasicBlock &BB) {
/// handleFreeWithNonTrivialDependency - Handle frees of entire structures whose
/// dependency is a store to a field of that structure.
bool DSE::handleFreeWithNonTrivialDependency(FreeInst* F, DepResultTy dep) {
bool DSE::handleFreeWithNonTrivialDependency(FreeInst* F, MemDepResult dep) {
TargetData &TD = getAnalysis<TargetData>();
AliasAnalysis &AA = getAnalysis<AliasAnalysis>();
if (dep.getInt() == MemoryDependenceAnalysis::None ||
dep.getInt() == MemoryDependenceAnalysis::NonLocal)
return false;
StoreInst* dependency = dyn_cast<StoreInst>(dep.getPointer());
StoreInst* dependency = dyn_cast_or_null<StoreInst>(dep.getInst());
if (!dependency)
return false;
else if (dependency->isVolatile())


@ -456,19 +456,19 @@ uint32_t ValueTable::lookup_or_add(Value* V) {
return nextValueNumber++;
}
MemoryDependenceAnalysis::DepResultTy local_dep = MD->getDependency(C);
MemDepResult local_dep = MD->getDependency(C);
if (local_dep.getInt() == MemoryDependenceAnalysis::None) {
if (local_dep.isNone()) {
valueNumbering.insert(std::make_pair(V, nextValueNumber));
return nextValueNumber++;
} else if (local_dep.getInt() != MemoryDependenceAnalysis::NonLocal) {
} else if (Instruction *LocalDepInst = local_dep.getInst()) {
// FIXME: INDENT PROPERLY!
if (!isa<CallInst>(local_dep.getPointer())) {
if (!isa<CallInst>(LocalDepInst)) {
valueNumbering.insert(std::make_pair(V, nextValueNumber));
return nextValueNumber++;
}
CallInst* local_cdep = cast<CallInst>(local_dep.getPointer());
CallInst* local_cdep = cast<CallInst>(LocalDepInst);
// FIXME: INDENT PROPERLY.
if (local_cdep->getCalledFunction() != C->getCalledFunction() ||
@ -495,20 +495,21 @@ uint32_t ValueTable::lookup_or_add(Value* V) {
}
DenseMap<BasicBlock*, MemoryDependenceAnalysis::DepResultTy> deps;
DenseMap<BasicBlock*, MemDepResult> deps;
MD->getNonLocalDependency(C, deps);
CallInst* cdep = 0;
for (DenseMap<BasicBlock*, MemoryDependenceAnalysis::DepResultTy>
for (DenseMap<BasicBlock*, MemDepResult>
::iterator I = deps.begin(), E = deps.end(); I != E; ++I) {
if (I->second.getInt() == MemoryDependenceAnalysis::None) {
if (I->second.isNone()) {
valueNumbering.insert(std::make_pair(V, nextValueNumber));
return nextValueNumber++;
} else if (I->second.getInt() != MemoryDependenceAnalysis::NonLocal) {
} else if (Instruction *NonLocalDepInst = I->second.getInst()) {
// FIXME: INDENT PROPERLY
// FIXME: All duplicated with non-local case.
if (DT->properlyDominates(I->first, C->getParent())) {
if (CallInst* CD = dyn_cast<CallInst>(I->second.getPointer()))
if (CallInst* CD = dyn_cast<CallInst>(NonLocalDepInst))
cdep = CD;
else {
valueNumbering.insert(std::make_pair(V, nextValueNumber));
@ -721,8 +722,6 @@ namespace {
AU.addPreserved<AliasAnalysis>();
}
typedef MemoryDependenceAnalysis::DepResultTy DepResultTy;
// Helper fuctions
// FIXME: eliminate or document these better
bool processLoad(LoadInst* L,
@ -866,7 +865,7 @@ bool GVN::processNonLocalLoad(LoadInst* L,
MemoryDependenceAnalysis& MD = getAnalysis<MemoryDependenceAnalysis>();
// Find the non-local dependencies of the load
DenseMap<BasicBlock*, DepResultTy> deps;
DenseMap<BasicBlock*, MemDepResult> deps;
MD.getNonLocalDependency(L, deps);
// If we had to process more than one hundred blocks to find the
@ -878,19 +877,19 @@ bool GVN::processNonLocalLoad(LoadInst* L,
DenseMap<BasicBlock*, Value*> repl;
// Filter out useless results (non-locals, etc)
for (DenseMap<BasicBlock*, DepResultTy>::iterator I = deps.begin(),
for (DenseMap<BasicBlock*, MemDepResult>::iterator I = deps.begin(),
E = deps.end(); I != E; ++I) {
if (I->second.getInt() == MemoryDependenceAnalysis::None)
if (I->second.isNone())
return false;
if (I->second.getInt() == MemoryDependenceAnalysis::NonLocal)
if (I->second.isNonLocal())
continue;
if (StoreInst* S = dyn_cast<StoreInst>(I->second.getPointer())) {
if (StoreInst* S = dyn_cast<StoreInst>(I->second.getInst())) {
if (S->getPointerOperand() != L->getPointerOperand())
return false;
repl[I->first] = S->getOperand(0);
} else if (LoadInst* LD = dyn_cast<LoadInst>(I->second.getPointer())) {
} else if (LoadInst* LD = dyn_cast<LoadInst>(I->second.getInst())) {
if (LD->getPointerOperand() != L->getPointerOperand())
return false;
repl[I->first] = LD;
@ -941,8 +940,8 @@ bool GVN::processLoad(LoadInst *L, DenseMap<Value*, LoadInst*> &lastLoad,
// ... to a pointer that has been loaded from before...
MemoryDependenceAnalysis& MD = getAnalysis<MemoryDependenceAnalysis>();
bool removedNonLocal = false;
DepResultTy dep = MD.getDependency(L);
if (dep.getInt() == MemoryDependenceAnalysis::NonLocal &&
MemDepResult dep = MD.getDependency(L);
if (dep.isNonLocal() &&
L->getParent() != &L->getParent()->getParent()->getEntryBlock()) {
removedNonLocal = processNonLocalLoad(L, toErase);
@ -957,10 +956,9 @@ bool GVN::processLoad(LoadInst *L, DenseMap<Value*, LoadInst*> &lastLoad,
// Walk up the dependency chain until we either find
// a dependency we can use, or we can't walk any further
while (dep.getInt() == MemoryDependenceAnalysis::Normal &&
(isa<LoadInst>(dep.getPointer()) || isa<StoreInst>(dep.getPointer()))){
while (Instruction *DepInst = dep.getInst()) {
// ... that depends on a store ...
if (StoreInst* S = dyn_cast<StoreInst>(dep.getPointer())) {
if (StoreInst* S = dyn_cast<StoreInst>(DepInst)) {
if (S->getPointerOperand() == pointer) {
// Remove it!
MD.removeInstruction(L);
@ -974,11 +972,14 @@ bool GVN::processLoad(LoadInst *L, DenseMap<Value*, LoadInst*> &lastLoad,
// Whether we removed it or not, we can't
// go any further
break;
} else if (!isa<LoadInst>(DepInst)) {
// Only want to handle loads below.
break;
} else if (!last) {
// If we don't depend on a store, and we haven't
// been loaded before, bail.
break;
} else if (dep.getPointer() == last) {
} else if (DepInst == last) {
// Remove it!
MD.removeInstruction(L);
@ -989,15 +990,14 @@ bool GVN::processLoad(LoadInst *L, DenseMap<Value*, LoadInst*> &lastLoad,
break;
} else {
dep = MD.getDependency(L, dep.getPointer());
dep = MD.getDependency(L, DepInst);
}
}
if (dep.getInt() == MemoryDependenceAnalysis::Normal &&
isa<AllocationInst>(dep.getPointer())) {
if (AllocationInst *DepAI = dyn_cast_or_null<AllocationInst>(dep.getInst())) {
// Check that this load is actually from the
// allocation we found
if (L->getOperand(0)->getUnderlyingObject() == dep.getPointer()) {
if (L->getOperand(0)->getUnderlyingObject() == DepAI) {
// If this load depends directly on an allocation, there isn't
// anything stored there; therefore, we can optimize this load
// to undef.


@ -629,18 +629,17 @@ bool MemCpyOpt::processMemCpy(MemCpyInst* M) {
// The are two possible optimizations we can do for memcpy:
// a) memcpy-memcpy xform which exposes redundance for DSE
// b) call-memcpy xform for return slot optimization
MemoryDependenceAnalysis::DepResultTy dep = MD.getDependency(M);
if (dep.getInt() == MemoryDependenceAnalysis::None ||
dep.getInt() == MemoryDependenceAnalysis::NonLocal)
MemDepResult dep = MD.getDependency(M);
if (!dep.isNormal())
return false;
else if (!isa<MemCpyInst>(dep.getPointer())) {
if (CallInst* C = dyn_cast<CallInst>(dep.getPointer()))
else if (!isa<MemCpyInst>(dep.getInst())) {
if (CallInst* C = dyn_cast<CallInst>(dep.getInst()))
return performCallSlotOptzn(M, C);
else
return false;
}
MemCpyInst* MDep = cast<MemCpyInst>(dep.getPointer());
MemCpyInst* MDep = cast<MemCpyInst>(dep.getInst());
// We can only transforms memcpy's where the dest of one is the source of the
// other