author    patrick <patrick@openbsd.org>  2019-01-27 16:42:12 +0000
committer patrick <patrick@openbsd.org>  2019-01-27 16:42:12 +0000
commit    b773203fb58f3ef282fb69c832d8710cab5bc82d (patch)
tree      e75913f147570fbd75169647b144df85b88a038c  /gnu/llvm/lib/Analysis/GlobalsModRef.cpp
parent    tweak errno in previous (diff)
Import LLVM 7.0.1 release including clang, lld and lldb.
Diffstat (limited to 'gnu/llvm/lib/Analysis/GlobalsModRef.cpp')
-rw-r--r--  gnu/llvm/lib/Analysis/GlobalsModRef.cpp  20
1 file changed, 13 insertions(+), 7 deletions(-)
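
For context on the first hunk in the diff below: LLVM_ALIGNAS was LLVM's pre-C++11 portability macro for specifying alignment, and the change simply switches to the standard C++11 alignas specifier. The following is a minimal standalone sketch, not the LLVM code itself (AlignedMapSketch and the int map payload are illustrative stand-ins for AlignedMap and GlobalInfoMapType), showing that alignas(8) on a wrapper struct guarantees at least 8-byte alignment, which the comment in the hunk notes the surrounding code specifically relies on.

#include <map>

// Hypothetical stand-in for GlobalsAAResult::FunctionInfo::AlignedMap:
// alignas(8) forces the wrapper to at least 8-byte alignment so that the
// low bits of pointers to it remain free for flag storage.
struct alignas(8) AlignedMapSketch {
  AlignedMapSketch() {}
  AlignedMapSketch(const AlignedMapSketch &Arg) : Map(Arg.Map) {}
  std::map<int, int> Map; // illustrative payload, not GlobalInfoMapType
};

static_assert(alignof(AlignedMapSketch) >= 8,
              "alignas(8) guarantees at least 8-byte alignment");

int main() {
  AlignedMapSketch M;            // heap or stack, alignment holds either way
  return M.Map.empty() ? 0 : 1;
}
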
diff --git a/gnu/llvm/lib/Analysis/GlobalsModRef.cpp b/gnu/llvm/lib/Analysis/GlobalsModRef.cpp
index daee93267f5..2c503609d96 100644
--- a/gnu/llvm/lib/Analysis/GlobalsModRef.cpp
+++ b/gnu/llvm/lib/Analysis/GlobalsModRef.cpp
@@ -65,7 +65,7 @@ class GlobalsAAResult::FunctionInfo {
/// Build a wrapper struct that has 8-byte alignment. All heap allocations
/// should provide this much alignment at least, but this makes it clear we
/// specifically rely on this amount of alignment.
- struct LLVM_ALIGNAS(8) AlignedMap {
+ struct alignas(8) AlignedMap {
AlignedMap() {}
AlignedMap(const AlignedMap &Arg) : Map(Arg.Map) {}
GlobalInfoMapType Map;
@@ -409,7 +409,7 @@ bool GlobalsAAResult::AnalyzeIndirectGlobalMemory(GlobalVariable *GV) {
if (Constant *C = GV->getInitializer())
if (!C->isNullValue())
return false;
-
+
// Walk the user list of the global. If we find anything other than a direct
// load or store, bail out.
for (User *U : GV->users()) {
@@ -464,7 +464,7 @@ bool GlobalsAAResult::AnalyzeIndirectGlobalMemory(GlobalVariable *GV) {
return true;
}
-void GlobalsAAResult::CollectSCCMembership(CallGraph &CG) {
+void GlobalsAAResult::CollectSCCMembership(CallGraph &CG) {
// We do a bottom-up SCC traversal of the call graph. In other words, we
// visit all callees before callers (leaf-first).
unsigned SCCID = 0;
@@ -502,6 +502,8 @@ void GlobalsAAResult::AnalyzeCallGraph(CallGraph &CG, Module &M) {
}
FunctionInfo &FI = FunctionInfos[F];
+ Handles.emplace_front(*this, F);
+ Handles.front().I = Handles.begin();
bool KnowNothing = false;
// Collect the mod/ref properties due to called functions. We only compute
@@ -582,6 +584,10 @@ void GlobalsAAResult::AnalyzeCallGraph(CallGraph &CG, Module &M) {
} else if (Function *Callee = CS.getCalledFunction()) {
// The callgraph doesn't include intrinsic calls.
if (Callee->isIntrinsic()) {
+ if (isa<DbgInfoIntrinsic>(I))
+ // Don't let dbg intrinsics affect alias info.
+ continue;
+
FunctionModRefBehavior Behaviour =
AAResultBase::getModRefBehavior(Callee);
FI.addModRefInfo(createModRefInfo(Behaviour));
@@ -627,7 +633,7 @@ static bool isNonEscapingGlobalNoAliasWithLoad(const GlobalValue *GV,
Inputs.push_back(V);
do {
const Value *Input = Inputs.pop_back_val();
-
+
if (isa<GlobalValue>(Input) || isa<Argument>(Input) || isa<CallInst>(Input) ||
isa<InvokeInst>(Input))
// Arguments to functions or returns from functions are inherently
@@ -648,7 +654,7 @@ static bool isNonEscapingGlobalNoAliasWithLoad(const GlobalValue *GV,
if (auto *LI = dyn_cast<LoadInst>(Input)) {
Inputs.push_back(GetUnderlyingObject(LI->getPointerOperand(), DL));
continue;
- }
+ }
if (auto *SI = dyn_cast<SelectInst>(Input)) {
const Value *LHS = GetUnderlyingObject(SI->getTrueValue(), DL);
const Value *RHS = GetUnderlyingObject(SI->getFalseValue(), DL);
@@ -666,7 +672,7 @@ static bool isNonEscapingGlobalNoAliasWithLoad(const GlobalValue *GV,
}
continue;
}
-
+
return false;
} while (!Inputs.empty());
@@ -748,7 +754,7 @@ bool GlobalsAAResult::isNonEscapingGlobalNoAlias(const GlobalValue *GV,
// non-addr-taken globals.
continue;
}
-
+
// Recurse through a limited number of selects, loads and PHIs. This is an
// arbitrary depth of 4, lower numbers could be used to fix compile time
// issues if needed, but this is generally expected to be only be important