author     shadchin <shadchin@yandex-team.ru>          2022-02-10 16:44:30 +0300
committer  Daniil Cherednik <dcherednik@yandex-team.ru> 2022-02-10 16:44:30 +0300
commit     2598ef1d0aee359b4b6d5fdd1758916d5907d04f (patch)
tree       012bb94d777798f1f56ac1cec429509766d05181 /contrib/libs/llvm12/lib/Transforms/Scalar/InferAddressSpaces.cpp
parent     6751af0b0c1b952fede40b19b71da8025b5d8bcf (diff)
download   ydb-2598ef1d0aee359b4b6d5fdd1758916d5907d04f.tar.gz
Restoring authorship annotation for <shadchin@yandex-team.ru>. Commit 1 of 2.
Diffstat (limited to 'contrib/libs/llvm12/lib/Transforms/Scalar/InferAddressSpaces.cpp')
-rw-r--r--  contrib/libs/llvm12/lib/Transforms/Scalar/InferAddressSpaces.cpp  |  228
1 file changed, 114 insertions, 114 deletions
diff --git a/contrib/libs/llvm12/lib/Transforms/Scalar/InferAddressSpaces.cpp b/contrib/libs/llvm12/lib/Transforms/Scalar/InferAddressSpaces.cpp
index 332eb10ac1..9127f3c2e0 100644
--- a/contrib/libs/llvm12/lib/Transforms/Scalar/InferAddressSpaces.cpp
+++ b/contrib/libs/llvm12/lib/Transforms/Scalar/InferAddressSpaces.cpp
@@ -88,7 +88,7 @@
//
//===----------------------------------------------------------------------===//
-#include "llvm/Transforms/Scalar/InferAddressSpaces.h"
+#include "llvm/Transforms/Scalar/InferAddressSpaces.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DenseSet.h"
@@ -109,7 +109,7 @@
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Operator.h"
-#include "llvm/IR/PassManager.h"
+#include "llvm/IR/PassManager.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Use.h"
#include "llvm/IR/User.h"
@@ -164,16 +164,16 @@ public:
}
bool runOnFunction(Function &F) override;
-};
-
-class InferAddressSpacesImpl {
- const TargetTransformInfo *TTI = nullptr;
- const DataLayout *DL = nullptr;
-
- /// Target specific address space whose uses should be replaced if
- /// possible.
- unsigned FlatAddrSpace = 0;
-
+};
+
+class InferAddressSpacesImpl {
+ const TargetTransformInfo *TTI = nullptr;
+ const DataLayout *DL = nullptr;
+
+ /// Target specific address space whose uses should be replaced if
+ /// possible.
+ unsigned FlatAddrSpace = 0;
+
// Returns the new address space of V if updated; otherwise, returns None.
Optional<unsigned>
updateAddressSpace(const Value &V,
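The Optional<unsigned> return type encodes a partial update: when the recomputed address space equals the one already recorded, the function reports "no change" so the fixed-point driver can avoid requeueing users. A minimal standalone sketch of that contract, using std::optional and plain unsigneds rather than LLVM's types (all names here are illustrative):

#include <optional>
#include <vector>

// Recompute a value's address space as the join of its operands' spaces.
// std::nullopt means "unchanged", so the caller skips revisiting users.
std::optional<unsigned> updateAddressSpace(unsigned OldAS,
                                           const std::vector<unsigned> &OperandAS,
                                           unsigned FlatAS) {
  unsigned NewAS = ~0u; // uninitialized: the lattice bottom
  for (unsigned AS : OperandAS) {
    NewAS = (NewAS == ~0u) ? AS : (NewAS == AS ? NewAS : FlatAS);
    if (NewAS == FlatAS)
      break; // join(flat, *) == flat, no need to look further
  }
  if (NewAS == OldAS)
    return std::nullopt;
  return NewAS;
}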
@@ -215,11 +215,11 @@ class InferAddressSpacesImpl {
const ValueToValueMapTy &ValueWithNewAddrSpace,
SmallVectorImpl<const Use *> *UndefUsesToFix) const;
unsigned joinAddressSpaces(unsigned AS1, unsigned AS2) const;
-
-public:
- InferAddressSpacesImpl(const TargetTransformInfo *TTI, unsigned FlatAddrSpace)
- : TTI(TTI), FlatAddrSpace(FlatAddrSpace) {}
- bool run(Function &F);
+
+public:
+ InferAddressSpacesImpl(const TargetTransformInfo *TTI, unsigned FlatAddrSpace)
+ : TTI(TTI), FlatAddrSpace(FlatAddrSpace) {}
+ bool run(Function &F);
};
} // end anonymous namespace
@@ -295,8 +295,8 @@ static bool isAddressExpression(const Value &V, const DataLayout &DL,
case Instruction::IntToPtr:
return isNoopPtrIntCastPair(Op, DL, TTI);
default:
- // That value is an address expression if it has an assumed address space.
- return TTI->getAssumedAddrSpace(&V) != UninitializedAddressSpace;
+ // That value is an address expression if it has an assumed address space.
+ return TTI->getAssumedAddrSpace(&V) != UninitializedAddressSpace;
}
}
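Read as a whole, isAddressExpression whitelists the pointer-transparent opcodes (phi, select, bitcast, addrspacecast, getelementptr), admits inttoptr only when it cancels a matching ptrtoint, and otherwise defers to the target's assumed address space. A rough standalone model of that decision; the enum and callback are stand-ins, not LLVM's API:

#include <functional>

enum class Opcode { PHI, Select, BitCast, AddrSpaceCast, GetElementPtr,
                    IntToPtr, Other };

constexpr unsigned UninitializedAddressSpace = ~0u;

// True if the value merely forwards or recomputes a pointer, so its
// address space can be inferred from its operands (or from the target).
bool isAddressExpression(Opcode Op, bool NoopPtrIntCastPair,
                         const std::function<unsigned()> &AssumedAS) {
  switch (Op) {
  case Opcode::PHI:
  case Opcode::Select:
  case Opcode::BitCast:
  case Opcode::AddrSpaceCast:
  case Opcode::GetElementPtr:
    return true;                // pointer-transparent by construction
  case Opcode::IntToPtr:
    return NoopPtrIntCastPair;  // only a cancelled ptrtoint/inttoptr pair
  default:
    return AssumedAS() != UninitializedAddressSpace;
  }
}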
@@ -335,9 +335,9 @@ getPointerOperands(const Value &V, const DataLayout &DL,
}
}
-bool InferAddressSpacesImpl::rewriteIntrinsicOperands(IntrinsicInst *II,
- Value *OldV,
- Value *NewV) const {
+bool InferAddressSpacesImpl::rewriteIntrinsicOperands(IntrinsicInst *II,
+ Value *OldV,
+ Value *NewV) const {
Module *M = II->getParent()->getParent()->getParent();
switch (II->getIntrinsicID()) {
@@ -364,7 +364,7 @@ bool InferAddressSpacesImpl::rewriteIntrinsicOperands(IntrinsicInst *II,
}
}
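The diff elides the interesting cases of this switch. Conceptually, each rewritable intrinsic knows which operand is the pointer, and rewriting means swapping that operand for the new-address-space value (the real pass also remangles the intrinsic's overloaded signature). A toy sketch of the dispatch, with a hypothetical name-to-operand-index table standing in for LLVM's intrinsic IDs; objectsize and ptrmask mirror cases the real pass handles:

#include <map>
#include <string>
#include <vector>

struct Call { std::string Intrinsic; std::vector<int> Args; };

// Returns true and rewrites in place when the call's pointer argument is
// OldV and the intrinsic is one we know how to update; false otherwise.
bool rewriteIntrinsicOperand(Call &C, int OldV, int NewV) {
  static const std::map<std::string, size_t> PtrArgIdx = {
      {"objectsize", 0}, {"ptrmask", 0}};
  auto It = PtrArgIdx.find(C.Intrinsic);
  if (It == PtrArgIdx.end() || C.Args[It->second] != OldV)
    return false;               // unknown intrinsic: leave it alone
  C.Args[It->second] = NewV;    // point at the inferred-space value
  return true;
}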
-void InferAddressSpacesImpl::collectRewritableIntrinsicOperands(
+void InferAddressSpacesImpl::collectRewritableIntrinsicOperands(
IntrinsicInst *II, PostorderStackTy &PostorderStack,
DenseSet<Value *> &Visited) const {
auto IID = II->getIntrinsicID();
@@ -389,7 +389,7 @@ void InferAddressSpacesImpl::collectRewritableIntrinsicOperands(
// If V is an unvisited flat address expression, appends V to PostorderStack
// and marks it as visited.
-void InferAddressSpacesImpl::appendsFlatAddressExpressionToPostorderStack(
+void InferAddressSpacesImpl::appendsFlatAddressExpressionToPostorderStack(
Value *V, PostorderStackTy &PostorderStack,
DenseSet<Value *> &Visited) const {
assert(V->getType()->isPointerTy());
@@ -404,8 +404,8 @@ void InferAddressSpacesImpl::appendsFlatAddressExpressionToPostorderStack(
return;
}
- if (V->getType()->getPointerAddressSpace() == FlatAddrSpace &&
- isAddressExpression(*V, *DL, TTI)) {
+ if (V->getType()->getPointerAddressSpace() == FlatAddrSpace &&
+ isAddressExpression(*V, *DL, TTI)) {
if (Visited.insert(V).second) {
PostorderStack.emplace_back(V, false);
@@ -423,7 +423,7 @@ void InferAddressSpacesImpl::appendsFlatAddressExpressionToPostorderStack(
// Returns all flat address expressions in function F. The elements are
// ordered in postorder.
std::vector<WeakTrackingVH>
-InferAddressSpacesImpl::collectFlatAddressExpressions(Function &F) const {
+InferAddressSpacesImpl::collectFlatAddressExpressions(Function &F) const {
// This function implements a non-recursive postorder traversal of a partial
// use-def graph of function F.
PostorderStackTy PostorderStack;
@@ -488,12 +488,12 @@ InferAddressSpacesImpl::collectFlatAddressExpressions(Function &F) const {
}
// Otherwise, adds its operands to the stack and explores them.
PostorderStack.back().setInt(true);
- // Skip values with an assumed address space.
- if (TTI->getAssumedAddrSpace(TopVal) == UninitializedAddressSpace) {
- for (Value *PtrOperand : getPointerOperands(*TopVal, *DL, TTI)) {
- appendsFlatAddressExpressionToPostorderStack(PtrOperand, PostorderStack,
- Visited);
- }
+ // Skip values with an assumed address space.
+ if (TTI->getAssumedAddrSpace(TopVal) == UninitializedAddressSpace) {
+ for (Value *PtrOperand : getPointerOperands(*TopVal, *DL, TTI)) {
+ appendsFlatAddressExpressionToPostorderStack(PtrOperand, PostorderStack,
+ Visited);
+ }
}
}
return Postorder;
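The same traversal in miniature: a standalone C++ version of the explicit-stack postorder walk, where the bool on each stack entry plays the role of setInt(true) above. The graph and node types are stand-ins for the use-def structure:

#include <cstdio>
#include <unordered_set>
#include <utility>
#include <vector>

using Graph = std::vector<std::vector<int>>; // node -> its operand nodes

// Non-recursive postorder: each node is pushed once (guarded by Visited)
// and emitted on its second visit, after all of its operands.
std::vector<int> postorder(const Graph &G, int Root) {
  std::vector<std::pair<int, bool>> Stack{{Root, false}};
  std::unordered_set<int> Visited{Root};
  std::vector<int> Order;
  while (!Stack.empty()) {
    int Node = Stack.back().first;
    if (Stack.back().second) {  // operands already expanded: emit
      Order.push_back(Node);
      Stack.pop_back();
      continue;
    }
    Stack.back().second = true; // next time this entry is seen, emit it
    for (int Op : G[Node])      // Node is a copy; pushes may reallocate Stack
      if (Visited.insert(Op).second)
        Stack.emplace_back(Op, false);
  }
  return Order;
}

int main() {
  Graph G = {{1, 2}, {2}, {}};  // 0 uses 1 and 2; 1 uses 2
  for (int N : postorder(G, 0))
    std::printf("%d ", N);      // prints "2 1 0"
  return 0;
}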
@@ -533,7 +533,7 @@ static Value *operandWithNewAddressSpaceOrCreateUndef(
//
// This may also return nullptr in the case the instruction could not be
// rewritten.
-Value *InferAddressSpacesImpl::cloneInstructionWithNewAddressSpace(
+Value *InferAddressSpacesImpl::cloneInstructionWithNewAddressSpace(
Instruction *I, unsigned NewAddrSpace,
const ValueToValueMapTy &ValueWithNewAddrSpace,
SmallVectorImpl<const Use *> *UndefUsesToFix) const {
@@ -568,16 +568,16 @@ Value *InferAddressSpacesImpl::cloneInstructionWithNewAddressSpace(
return nullptr;
}
- unsigned AS = TTI->getAssumedAddrSpace(I);
- if (AS != UninitializedAddressSpace) {
- // For the assumed address space, insert an `addrspacecast` to make that
- // explicit.
- auto *NewPtrTy = I->getType()->getPointerElementType()->getPointerTo(AS);
- auto *NewI = new AddrSpaceCastInst(I, NewPtrTy);
- NewI->insertAfter(I);
- return NewI;
- }
-
+ unsigned AS = TTI->getAssumedAddrSpace(I);
+ if (AS != UninitializedAddressSpace) {
+ // For the assumed address space, insert an `addrspacecast` to make that
+ // explicit.
+ auto *NewPtrTy = I->getType()->getPointerElementType()->getPointerTo(AS);
+ auto *NewI = new AddrSpaceCastInst(I, NewPtrTy);
+ NewI->insertAfter(I);
+ return NewI;
+ }
+
// Computes the converted pointer operands.
SmallVector<Value *, 4> NewPointerOperands;
for (const Use &OperandUse : I->operands()) {
@@ -606,7 +606,7 @@ Value *InferAddressSpacesImpl::cloneInstructionWithNewAddressSpace(
GetElementPtrInst *GEP = cast<GetElementPtrInst>(I);
GetElementPtrInst *NewGEP = GetElementPtrInst::Create(
GEP->getSourceElementType(), NewPointerOperands[0],
- SmallVector<Value *, 4>(GEP->indices()));
+ SmallVector<Value *, 4>(GEP->indices()));
NewGEP->setIsInBounds(GEP->isInBounds());
return NewGEP;
}
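operandWithNewAddressSpaceOrCreateUndef, named in the hunk header above, is what feeds NewPointerOperands: if an operand's clone already exists in ValueWithNewAddrSpace it is used directly; otherwise an undef placeholder is emitted and the use is queued in UndefUsesToFix. A small standalone model of that helper's contract, with ints standing in for llvm::Value*:

#include <map>
#include <vector>

constexpr int Undef = -1;

struct CloneState {
  std::map<int, int> ValueWithNewAddrSpace;          // original -> clone
  std::vector<std::pair<int *, int>> UndefUsesToFix; // slot to patch, original
};

// Produce the operand for a clone: the operand's own clone if it exists,
// else a placeholder recorded so a later fixup pass can patch the slot.
void setOperand(CloneState &S, int *Slot, int Operand) {
  auto It = S.ValueWithNewAddrSpace.find(Operand);
  if (It != S.ValueWithNewAddrSpace.end()) {
    *Slot = It->second;
    return;
  }
  *Slot = Undef;
  S.UndefUsesToFix.emplace_back(Slot, Operand);
}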
@@ -718,13 +718,13 @@ static Value *cloneConstantExprWithNewAddressSpace(
// expression whose address space needs to be modified, in postorder.
//
// See cloneInstructionWithNewAddressSpace for the meaning of UndefUsesToFix.
-Value *InferAddressSpacesImpl::cloneValueWithNewAddressSpace(
- Value *V, unsigned NewAddrSpace,
- const ValueToValueMapTy &ValueWithNewAddrSpace,
- SmallVectorImpl<const Use *> *UndefUsesToFix) const {
+Value *InferAddressSpacesImpl::cloneValueWithNewAddressSpace(
+ Value *V, unsigned NewAddrSpace,
+ const ValueToValueMapTy &ValueWithNewAddrSpace,
+ SmallVectorImpl<const Use *> *UndefUsesToFix) const {
// All values in Postorder are flat address expressions.
- assert(V->getType()->getPointerAddressSpace() == FlatAddrSpace &&
- isAddressExpression(*V, *DL, TTI));
+ assert(V->getType()->getPointerAddressSpace() == FlatAddrSpace &&
+ isAddressExpression(*V, *DL, TTI));
if (Instruction *I = dyn_cast<Instruction>(V)) {
Value *NewV = cloneInstructionWithNewAddressSpace(
@@ -744,8 +744,8 @@ Value *InferAddressSpacesImpl::cloneValueWithNewAddressSpace(
// Defines the join operation on the address space lattice (see the file header
// comments).
-unsigned InferAddressSpacesImpl::joinAddressSpaces(unsigned AS1,
- unsigned AS2) const {
+unsigned InferAddressSpacesImpl::joinAddressSpaces(unsigned AS1,
+ unsigned AS2) const {
if (AS1 == FlatAddrSpace || AS2 == FlatAddrSpace)
return FlatAddrSpace;
@@ -758,7 +758,7 @@ unsigned InferAddressSpacesImpl::joinAddressSpaces(unsigned AS1,
return (AS1 == AS2) ? AS1 : FlatAddrSpace;
}
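Spelled out as a standalone function with the boundary cases from the file-header lattice. The concrete numbers below are arbitrary; in the real pass UninitializedAddressSpace is ~0u and FlatAddrSpace is target-chosen (e.g. 0 on AMDGPU):

#include <cassert>

constexpr unsigned Uninitialized = ~0u;

unsigned join(unsigned AS1, unsigned AS2, unsigned Flat) {
  if (AS1 == Flat || AS2 == Flat)
    return Flat;                  // join(flat, *) == flat
  if (AS1 == Uninitialized)
    return AS2;                   // uninitialized is the identity element
  if (AS2 == Uninitialized)
    return AS1;
  return AS1 == AS2 ? AS1 : Flat; // distinct specific spaces escape to flat
}

int main() {
  constexpr unsigned Flat = 0, Global = 1, Shared = 3;
  assert(join(Global, Global, Flat) == Global);
  assert(join(Uninitialized, Shared, Flat) == Shared);
  assert(join(Global, Shared, Flat) == Flat);
  return 0;
}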
-bool InferAddressSpacesImpl::run(Function &F) {
+bool InferAddressSpacesImpl::run(Function &F) {
DL = &F.getParent()->getDataLayout();
if (AssumeDefaultIsFlatAddressSpace)
@@ -785,7 +785,7 @@ bool InferAddressSpacesImpl::run(Function &F) {
// Constants need to be tracked through RAUW to handle cases with nested
// constant expressions, so wrap values in WeakTrackingVH.
-void InferAddressSpacesImpl::inferAddressSpaces(
+void InferAddressSpacesImpl::inferAddressSpaces(
ArrayRef<WeakTrackingVH> Postorder,
ValueToAddrSpaceMapTy *InferredAddrSpace) const {
SetVector<Value *> Worklist(Postorder.begin(), Postorder.end());
@@ -829,7 +829,7 @@ void InferAddressSpacesImpl::inferAddressSpaces(
}
}
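The enclosing loop is a classic monotone worklist: pop a value, recompute its space via updateAddressSpace, and requeue its users only on change; since a value's space can only move toward flat, the iteration terminates. A self-contained sketch of that driver; the graph, join, and map types are illustrative, and unmapped operands default to flat here where the real pass uses the pointer type's own space:

#include <deque>
#include <map>
#include <vector>

struct Node { std::vector<int> Operands, Users; };
constexpr unsigned Uninit = ~0u;

void inferAddressSpaces(const std::vector<Node> &G,
                        std::map<int, unsigned> &AS, unsigned Flat) {
  auto join = [&](unsigned A, unsigned B) {
    if (A == Flat || B == Flat) return Flat;
    if (A == Uninit) return B;
    if (B == Uninit) return A;
    return A == B ? A : Flat;
  };
  std::deque<int> Worklist;
  for (size_t V = 0; V < G.size(); ++V) {
    AS.emplace((int)V, Uninit);
    Worklist.push_back((int)V);
  }
  while (!Worklist.empty()) {
    int V = Worklist.front();
    Worklist.pop_front();
    unsigned New = Uninit;
    for (int Op : G[V].Operands)
      New = join(New, AS.count(Op) ? AS[Op] : Flat);
    if (New == AS[V])
      continue;                  // unchanged: users need no revisit
    AS[V] = New;
    for (int U : G[V].Users)
      Worklist.push_back(U);     // propagate the change downstream
  }
}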
-Optional<unsigned> InferAddressSpacesImpl::updateAddressSpace(
+Optional<unsigned> InferAddressSpacesImpl::updateAddressSpace(
const Value &V, const ValueToAddrSpaceMapTy &InferredAddrSpace) const {
assert(InferredAddrSpace.count(&V));
@@ -867,24 +867,24 @@ Optional<unsigned> InferAddressSpacesImpl::updateAddressSpace(
else
NewAS = joinAddressSpaces(Src0AS, Src1AS);
} else {
- unsigned AS = TTI->getAssumedAddrSpace(&V);
- if (AS != UninitializedAddressSpace) {
- // Use the assumed address space directly.
- NewAS = AS;
- } else {
- // Otherwise, infer the address space from its pointer operands.
- for (Value *PtrOperand : getPointerOperands(V, *DL, TTI)) {
- auto I = InferredAddrSpace.find(PtrOperand);
- unsigned OperandAS =
- I != InferredAddrSpace.end()
- ? I->second
- : PtrOperand->getType()->getPointerAddressSpace();
-
- // join(flat, *) = flat. So we can break if NewAS is already flat.
- NewAS = joinAddressSpaces(NewAS, OperandAS);
- if (NewAS == FlatAddrSpace)
- break;
- }
+ unsigned AS = TTI->getAssumedAddrSpace(&V);
+ if (AS != UninitializedAddressSpace) {
+ // Use the assumed address space directly.
+ NewAS = AS;
+ } else {
+ // Otherwise, infer the address space from its pointer operands.
+ for (Value *PtrOperand : getPointerOperands(V, *DL, TTI)) {
+ auto I = InferredAddrSpace.find(PtrOperand);
+ unsigned OperandAS =
+ I != InferredAddrSpace.end()
+ ? I->second
+ : PtrOperand->getType()->getPointerAddressSpace();
+
+ // join(flat, *) = flat. So we can break if NewAS is already flat.
+ NewAS = joinAddressSpaces(NewAS, OperandAS);
+ if (NewAS == FlatAddrSpace)
+ break;
+ }
}
}
@@ -975,8 +975,8 @@ static bool handleMemIntrinsicPtrUse(MemIntrinsic *MI, Value *OldV,
// Returns true if it is OK to change the address space of constant \p C with
// a ConstantExpr addrspacecast.
-bool InferAddressSpacesImpl::isSafeToCastConstAddrSpace(Constant *C,
- unsigned NewAS) const {
+bool InferAddressSpacesImpl::isSafeToCastConstAddrSpace(Constant *C,
+ unsigned NewAS) const {
assert(NewAS != UninitializedAddressSpace);
unsigned SrcAS = C->getType()->getPointerAddressSpace();
@@ -1015,7 +1015,7 @@ static Value::use_iterator skipToNextUser(Value::use_iterator I,
return I;
}
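skipToNextUser guards the rewrite loop that follows: a single user can own several uses of V, and rewriting that user once may dispose of all of them, so the iterator is advanced past the whole group before any mutation. The same idiom on a plain grouped list (types are stand-ins):

#include <vector>

// Uses are visited in order and grouped by their user. Advance past every
// use belonging to the current user before rewriting, so the rewrite
// cannot invalidate positions the loop still intends to visit.
size_t skipToNextUser(const std::vector<int> &UseOwners, size_t I) {
  int CurUser = UseOwners[I];
  while (I != UseOwners.size() && UseOwners[I] == CurUser)
    ++I;
  return I;
}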
-bool InferAddressSpacesImpl::rewriteWithNewAddressSpaces(
+bool InferAddressSpacesImpl::rewriteWithNewAddressSpaces(
const TargetTransformInfo &TTI, ArrayRef<WeakTrackingVH> Postorder,
const ValueToAddrSpaceMapTy &InferredAddrSpace, Function *F) const {
// For each address expression to be modified, creates a clone of it with its
@@ -1026,12 +1026,12 @@ bool InferAddressSpacesImpl::rewriteWithNewAddressSpaces(
SmallVector<const Use *, 32> UndefUsesToFix;
for (Value* V : Postorder) {
unsigned NewAddrSpace = InferredAddrSpace.lookup(V);
-
- // In some degenerate cases (e.g. invalid IR in unreachable code), we may
- // not even infer the value to have its original address space.
- if (NewAddrSpace == UninitializedAddressSpace)
- continue;
-
+
+ // In some degenerate cases (e.g. invalid IR in unreachable code), we may
+ // not even infer the value to have its original address space.
+ if (NewAddrSpace == UninitializedAddressSpace)
+ continue;
+
if (V->getType()->getPointerAddressSpace() != NewAddrSpace) {
Value *New = cloneValueWithNewAddressSpace(
V, NewAddrSpace, ValueWithNewAddrSpace, &UndefUsesToFix);
@@ -1097,9 +1097,9 @@ bool InferAddressSpacesImpl::rewriteWithNewAddressSpaces(
}
User *CurUser = U.getUser();
- // Skip if the current user is the new value itself.
- if (CurUser == NewV)
- continue;
+ // Skip if the current user is the new value itself.
+ if (CurUser == NewV)
+ continue;
// Handle more complex cases like intrinsic that need to be remangled.
if (auto *MI = dyn_cast<MemIntrinsic>(CurUser)) {
if (!MI->isVolatile() && handleMemIntrinsicPtrUse(MI, V, NewV))
@@ -1186,34 +1186,34 @@ bool InferAddressSpacesImpl::rewriteWithNewAddressSpaces(
return true;
}
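Putting the pieces together, the rewrite phase is: skip values whose space never resolved (the degenerate case flagged above) or already matches, clone the rest in postorder with placeholder operands, then patch the placeholders once every clone exists. A compact standalone rendition under those assumptions, with ints for values and indices for use slots:

#include <map>
#include <vector>

constexpr unsigned Uninit = ~0u;
constexpr int Undef = -1;

struct Expr { unsigned AS; std::vector<int> Ops; };

// Clone each value whose inferred space differs from its current one;
// forward references (e.g. through PHI cycles) become Undef placeholders
// that are fixed up once the whole sweep is done.
void rewrite(std::vector<Expr> &Vals,
             const std::map<int, unsigned> &Inferred) {
  std::map<int, int> CloneOf;
  std::vector<std::pair<std::pair<int, size_t>, int>> UndefUsesToFix;
  const size_t NumOrig = Vals.size();
  for (int V = 0; V < (int)NumOrig; ++V) {   // values assumed in postorder
    auto It = Inferred.find(V);
    if (It == Inferred.end() || It->second == Uninit)
      continue;                              // degenerate: leave untouched
    if (Vals[V].AS == It->second)
      continue;                              // already in the right space
    int Clone = (int)Vals.size();            // id of the "new value"
    Vals.push_back({It->second, Vals[V].Ops});
    for (size_t I = 0; I < Vals[Clone].Ops.size(); ++I) {
      int Op = Vals[Clone].Ops[I];
      auto C = CloneOf.find(Op);
      if (C != CloneOf.end()) {
        Vals[Clone].Ops[I] = C->second;      // operand clone already built
      } else {
        Vals[Clone].Ops[I] = Undef;          // placeholder for a later clone
        UndefUsesToFix.push_back({{Clone, I}, Op});
      }
    }
    CloneOf[V] = Clone;
  }
  for (auto &[Loc, Orig] : UndefUsesToFix) { // late fixup of placeholders
    auto C = CloneOf.find(Orig);
    Vals[Loc.first].Ops[Loc.second] = (C != CloneOf.end()) ? C->second : Orig;
  }
}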
-bool InferAddressSpaces::runOnFunction(Function &F) {
- if (skipFunction(F))
- return false;
-
- return InferAddressSpacesImpl(
- &getAnalysis<TargetTransformInfoWrapperPass>().getTTI(F),
- FlatAddrSpace)
- .run(F);
-}
-
+bool InferAddressSpaces::runOnFunction(Function &F) {
+ if (skipFunction(F))
+ return false;
+
+ return InferAddressSpacesImpl(
+ &getAnalysis<TargetTransformInfoWrapperPass>().getTTI(F),
+ FlatAddrSpace)
+ .run(F);
+}
+
FunctionPass *llvm::createInferAddressSpacesPass(unsigned AddressSpace) {
return new InferAddressSpaces(AddressSpace);
}
-
-InferAddressSpacesPass::InferAddressSpacesPass()
- : FlatAddrSpace(UninitializedAddressSpace) {}
-InferAddressSpacesPass::InferAddressSpacesPass(unsigned AddressSpace)
- : FlatAddrSpace(AddressSpace) {}
-
-PreservedAnalyses InferAddressSpacesPass::run(Function &F,
- FunctionAnalysisManager &AM) {
- bool Changed =
- InferAddressSpacesImpl(&AM.getResult<TargetIRAnalysis>(F), FlatAddrSpace)
- .run(F);
- if (Changed) {
- PreservedAnalyses PA;
- PA.preserveSet<CFGAnalyses>();
- return PA;
- }
- return PreservedAnalyses::all();
-}
+
+InferAddressSpacesPass::InferAddressSpacesPass()
+ : FlatAddrSpace(UninitializedAddressSpace) {}
+InferAddressSpacesPass::InferAddressSpacesPass(unsigned AddressSpace)
+ : FlatAddrSpace(AddressSpace) {}
+
+PreservedAnalyses InferAddressSpacesPass::run(Function &F,
+ FunctionAnalysisManager &AM) {
+ bool Changed =
+ InferAddressSpacesImpl(&AM.getResult<TargetIRAnalysis>(F), FlatAddrSpace)
+ .run(F);
+ if (Changed) {
+ PreservedAnalyses PA;
+ PA.preserveSet<CFGAnalyses>();
+ return PA;
+ }
+ return PreservedAnalyses::all();
+}
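For completeness, a sketch of invoking the new-PM entry point above from C++. This assumes an LLVM 12 build to link against; note that with a TargetMachine-less PassBuilder the base TargetTransformInfo reports no flat address space, so the pass is effectively a no-op except on targets such as AMDGPU or NVPTX (or when AssumeDefaultIsFlatAddressSpace, seen in run() above, is set):

#include "llvm/IR/PassManager.h"
#include "llvm/Passes/PassBuilder.h"
#include "llvm/Transforms/Scalar/InferAddressSpaces.h"

using namespace llvm;

// Run InferAddressSpacesPass on one function through the new pass manager.
// The analysis managers must be registered and cross-wired before run().
PreservedAnalyses runInferAddressSpaces(Function &F) {
  PassBuilder PB;
  LoopAnalysisManager LAM;
  FunctionAnalysisManager FAM;
  CGSCCAnalysisManager CGAM;
  ModuleAnalysisManager MAM;
  PB.registerModuleAnalyses(MAM);
  PB.registerCGSCCAnalyses(CGAM);
  PB.registerFunctionAnalyses(FAM);
  PB.registerLoopAnalyses(LAM);
  PB.crossRegisterProxies(LAM, FAM, CGAM, MAM);

  FunctionPassManager FPM;
  FPM.addPass(InferAddressSpacesPass());
  return FPM.run(F, FAM);
}

From the command line, the same pass is reachable as opt -passes=infer-address-spaces (new PM) or opt -infer-address-spaces (legacy PM).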