//===--------------------- NVPTXAliasAnalysis.cpp -------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
/// \file
/// This is the NVPTX address space based alias analysis pass.
//===----------------------------------------------------------------------===//

#include "NVPTXAliasAnalysis.h"
#include "MCTargetDesc/NVPTXBaseInfo.h"
#include "NVPTX.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/InlineAsm.h"
#include "llvm/IR/Instructions.h"
#include "llvm/Support/CommandLine.h"

using namespace llvm;

#define DEBUG_TYPE "NVPTX-aa"

static cl::opt<unsigned> TraverseAddressSpacesLimit(
    "nvptx-traverse-address-aliasing-limit", cl::Hidden,
    cl::desc("Depth limit for finding address space through traversal"),
    cl::init(6));
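
// Note: this limit can be adjusted on the command line, e.g.
// `llc -nvptx-traverse-address-aliasing-limit=8 ...` (the value 8 is only
// illustrative); from clang the flag would be forwarded via `-mllvm`.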

AnalysisKey NVPTXAA::Key;

char NVPTXAAWrapperPass::ID = 0;
char NVPTXExternalAAWrapper::ID = 0;

INITIALIZE_PASS(NVPTXAAWrapperPass, "nvptx-aa",
                "NVPTX Address space based Alias Analysis", false, true)

INITIALIZE_PASS(NVPTXExternalAAWrapper, "nvptx-aa-wrapper",
                "NVPTX Address space based Alias Analysis Wrapper", false, true)

ImmutablePass *llvm::createNVPTXAAWrapperPass() {
  return new NVPTXAAWrapperPass();
}

ImmutablePass *llvm::createNVPTXExternalAAWrapperPass() {
  return new NVPTXExternalAAWrapper();
}

NVPTXAAWrapperPass::NVPTXAAWrapperPass() : ImmutablePass(ID) {}

void NVPTXAAWrapperPass::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesAll();
}

static unsigned getAddressSpace(const Value *V, unsigned MaxLookup) {
  // Find the first non-generic address space by traversing the UD chain.
  // It is undefined behaviour if a pointer belongs to more than one
  // non-overlapping address space along a valid execution path.
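  //
  // For example (illustrative IR, not taken from this pass), given:
  //   %gen = addrspacecast ptr addrspace(3) %smem to ptr
  //   %elt = getelementptr float, ptr %gen, i64 4
  // walking up the UD chain from %elt steps through the GEP and the
  // addrspacecast to %smem, whose type carries addrspace(3) (shared), so the
  // query resolves to the shared address space within the lookup limit.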
  auto GetAS = [](const Value *V) -> unsigned {
    if (const auto *PTy = dyn_cast<PointerType>(V->getType()))
      return PTy->getAddressSpace();
    return ADDRESS_SPACE_GENERIC;
  };
  while (MaxLookup-- && GetAS(V) == ADDRESS_SPACE_GENERIC) {
    const Value *NewV = getUnderlyingObject(V, 1);
    if (NewV == V)
      break;
    V = NewV;
  }
  return GetAS(V);
}

static AliasResult::Kind getAliasResult(unsigned AS1, unsigned AS2) {
  if ((AS1 == ADDRESS_SPACE_GENERIC) || (AS2 == ADDRESS_SPACE_GENERIC))
    return AliasResult::MayAlias;

  // PTX s6.4.1.1. Generic Addressing:
  // A generic address maps to global memory unless it falls within
  // the window for const, local, or shared memory. The Kernel
  // Function Parameters (.param) window is contained within the
  // .global window.
  //
  // Therefore a global pointer may alias with a param pointer on some
  // GPUs via addrspacecast(param->generic->global) when the cvta.param
  // instruction is used (PTX 7.7+ and SM_70+).
  //
  // TODO: cvta.param is not yet supported. We need to change the aliasing
  // rules once it is added.

  // Distributed shared memory aliases with shared memory.
  if (((AS1 == ADDRESS_SPACE_SHARED) &&
       (AS2 == ADDRESS_SPACE_SHARED_CLUSTER)) ||
      ((AS1 == ADDRESS_SPACE_SHARED_CLUSTER) && (AS2 == ADDRESS_SPACE_SHARED)))
    return AliasResult::MayAlias;

  return (AS1 == AS2 ? AliasResult::MayAlias : AliasResult::NoAlias);
}
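
// For example, under the rules above a shared pointer and a global pointer
// are reported NoAlias, while shared vs. shared::cluster pointers, or any
// query involving a generic pointer, conservatively stay MayAlias.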

AliasResult NVPTXAAResult::alias(const MemoryLocation &Loc1,
                                 const MemoryLocation &Loc2, AAQueryInfo &AAQI,
                                 const Instruction *) {
  unsigned AS1 = getAddressSpace(Loc1.Ptr, TraverseAddressSpacesLimit);
  unsigned AS2 = getAddressSpace(Loc2.Ptr, TraverseAddressSpacesLimit);

  return getAliasResult(AS1, AS2);
}
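
// Returning MayAlias here is not a final verdict: when this result is
// combined with the other registered alias analyses (e.g. BasicAA), MayAlias
// simply means this pass has no extra information and the remaining analyses
// are still consulted.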

// TODO: The .param address space may be writable in the presence of
// cvta.param, but this instruction is currently not supported. NVPTXLowerArgs
// also does not allow any writes to .param pointers.
static bool isConstOrParam(unsigned AS) {
  return AS == AddressSpace::ADDRESS_SPACE_CONST ||
         AS == AddressSpace::ADDRESS_SPACE_PARAM;
}

ModRefInfo NVPTXAAResult::getModRefInfoMask(const MemoryLocation &Loc,
                                            AAQueryInfo &AAQI,
                                            bool IgnoreLocals) {
  if (isConstOrParam(getAddressSpace(Loc.Ptr, TraverseAddressSpacesLimit)))
    return ModRefInfo::NoModRef;

  return ModRefInfo::ModRef;
}
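
// In effect, memory reached only through const or param pointers is treated
// as never modified within the function, so, for example, a load from a CUDA
// __constant__ array can be hoisted or CSE'd across unrelated stores.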

MemoryEffects NVPTXAAResult::getMemoryEffects(const CallBase *Call,
                                              AAQueryInfo &AAQI) {
  // Per the PTX specification, inline assembly with no side effects and no
  // memory clobbers should not access memory indirectly.
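  //
  // For example (illustrative calls only, assuming standard PTX asm):
  //   %x = call i32 asm "mov.u32 $0, %laneid;", "=r"()   -> none()
  //   call void asm sideeffect "membar.gl;", ""()        -> unknown()
  //   call void asm "", "~{memory}"()                    -> unknown()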
  if (const auto *IA = dyn_cast<InlineAsm>(Call->getCalledOperand())) {
    // Volatile is translated to side effects.
    if (IA->hasSideEffects())
      return MemoryEffects::unknown();

    for (const InlineAsm::ConstraintInfo &Constraint : IA->ParseConstraints()) {
      // Indirect constraints (e.g. =*m) are unsupported in inline PTX.
      if (Constraint.isIndirect)
        return MemoryEffects::unknown();

      // Memory clobbers prevent optimization.
      if (Constraint.Type == InlineAsm::ConstraintPrefix::isClobber &&
          any_of(Constraint.Codes,
                 [](const auto &Code) { return Code == "{memory}"; }))
        return MemoryEffects::unknown();
    }
    return MemoryEffects::none();
  }

  return MemoryEffects::unknown();
}