//===-- IntrinsicInst.cpp - Intrinsic Instruction Wrappers ---------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements methods that make it really easy to deal with intrinsic
// functions.
//
// All intrinsic function calls are instances of the call instruction, so these
// are all subclasses of the CallInst class. Note that none of these classes
// has state or virtual methods, which is an important part of this gross/neat
// hack working.
//
// In some cases, arguments to intrinsics need to be generic and are defined as
// type pointer to empty struct { }*. To access the real item of interest the
// cast instruction needs to be stripped away.
//
//===----------------------------------------------------------------------===//

#include "llvm/IR/IntrinsicInst.h"
#include "llvm/ADT/StringSwitch.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Operator.h"
#include "llvm/IR/PatternMatch.h"
#include "llvm/IR/Statepoint.h"
#include <optional>

using namespace llvm;

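/// Returns true for intrinsics that may be lowered to an ordinary function
/// call in the course of IR transformations; currently this is the set of
/// objc.* ARC and synchronization intrinsics, so passes must not assume these
/// stay intrinsic calls.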
bool IntrinsicInst::mayLowerToFunctionCall(Intrinsic::ID IID) {
  switch (IID) {
  case Intrinsic::objc_autorelease:
  case Intrinsic::objc_autoreleasePoolPop:
  case Intrinsic::objc_autoreleasePoolPush:
  case Intrinsic::objc_autoreleaseReturnValue:
  case Intrinsic::objc_copyWeak:
  case Intrinsic::objc_destroyWeak:
  case Intrinsic::objc_initWeak:
  case Intrinsic::objc_loadWeak:
  case Intrinsic::objc_loadWeakRetained:
  case Intrinsic::objc_moveWeak:
  case Intrinsic::objc_release:
  case Intrinsic::objc_retain:
  case Intrinsic::objc_retainAutorelease:
  case Intrinsic::objc_retainAutoreleaseReturnValue:
  case Intrinsic::objc_retainAutoreleasedReturnValue:
  case Intrinsic::objc_retainBlock:
  case Intrinsic::objc_storeStrong:
  case Intrinsic::objc_storeWeak:
  case Intrinsic::objc_unsafeClaimAutoreleasedReturnValue:
  case Intrinsic::objc_retainedObject:
  case Intrinsic::objc_unretainedObject:
  case Intrinsic::objc_unretainedPointer:
  case Intrinsic::objc_retain_autorelease:
  case Intrinsic::objc_sync_enter:
  case Intrinsic::objc_sync_exit:
    return true;
  default:
    return false;
  }
}

//===----------------------------------------------------------------------===//
/// DbgVariableIntrinsic - This is the common base class for debug info
/// intrinsics for variables.
///

iterator_range<location_op_iterator> RawLocationWrapper::location_ops() const {
  Metadata *MD = getRawLocation();
  assert(MD && "First operand of DbgVariableIntrinsic should be non-null.");
  // If operand is ValueAsMetadata, return a range over just that operand.
  if (auto *VAM = dyn_cast<ValueAsMetadata>(MD)) {
    return {location_op_iterator(VAM), location_op_iterator(VAM + 1)};
  }
  // If operand is DIArgList, return a range over its args.
  if (auto *AL = dyn_cast<DIArgList>(MD))
    return {location_op_iterator(AL->args_begin()),
            location_op_iterator(AL->args_end())};
  // Operand must be an empty metadata tuple, so return empty iterator.
  return {location_op_iterator(static_cast<ValueAsMetadata *>(nullptr)),
          location_op_iterator(static_cast<ValueAsMetadata *>(nullptr))};
}

iterator_range<location_op_iterator>
DbgVariableIntrinsic::location_ops() const {
  return getWrappedLocation().location_ops();
}

Value *DbgVariableIntrinsic::getVariableLocationOp(unsigned OpIdx) const {
  return getWrappedLocation().getVariableLocationOp(OpIdx);
}

Value *RawLocationWrapper::getVariableLocationOp(unsigned OpIdx) const {
  Metadata *MD = getRawLocation();
  assert(MD && "First operand of DbgVariableIntrinsic should be non-null.");
  if (auto *AL = dyn_cast<DIArgList>(MD))
    return AL->getArgs()[OpIdx]->getValue();
  if (isa<MDNode>(MD))
    return nullptr;
  assert(
      isa<ValueAsMetadata>(MD) &&
      "Attempted to get location operand from DbgVariableIntrinsic with none.");
  auto *V = cast<ValueAsMetadata>(MD);
  assert(OpIdx == 0 && "Operand Index must be 0 for a debug intrinsic with a "
                       "single location operand.");
  return V->getValue();
}

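/// Return \p V wrapped as ValueAsMetadata. If \p V is already a
/// MetadataAsValue, unwrap it and reuse the underlying ValueAsMetadata (or
/// null if it wraps some other kind of metadata).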
static ValueAsMetadata *getAsMetadata(Value *V) {
  return isa<MetadataAsValue>(V) ? dyn_cast<ValueAsMetadata>(
                                       cast<MetadataAsValue>(V)->getMetadata())
                                 : ValueAsMetadata::get(V);
}

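/// Replace every use of \p OldValue among this intrinsic's location operands
/// (and, for dbg.assign, its address operand) with \p NewValue. If \p OldValue
/// is neither a current location operand nor the dbg.assign address, this
/// asserts unless \p AllowEmpty is set.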
void DbgVariableIntrinsic::replaceVariableLocationOp(Value *OldValue,
                                                     Value *NewValue,
                                                     bool AllowEmpty) {
  // If OldValue is used as the address part of a dbg.assign intrinsic, replace
  // it with NewValue and return true.
  auto ReplaceDbgAssignAddress = [this, OldValue, NewValue]() -> bool {
    auto *DAI = dyn_cast<DbgAssignIntrinsic>(this);
    if (!DAI || OldValue != DAI->getAddress())
      return false;
    DAI->setAddress(NewValue);
    return true;
  };
  bool DbgAssignAddrReplaced = ReplaceDbgAssignAddress();
  (void)DbgAssignAddrReplaced;

  assert(NewValue && "Values must be non-null");
  auto Locations = location_ops();
  auto OldIt = find(Locations, OldValue);
  if (OldIt == Locations.end()) {
    if (AllowEmpty || DbgAssignAddrReplaced)
      return;
    assert(DbgAssignAddrReplaced &&
           "OldValue must be dbg.assign addr if unused in DIArgList");
    return;
  }

  assert(OldIt != Locations.end() && "OldValue must be a current location");
  if (!hasArgList()) {
    Value *NewOperand = isa<MetadataAsValue>(NewValue)
                            ? NewValue
                            : MetadataAsValue::get(
                                  getContext(), ValueAsMetadata::get(NewValue));
    return setArgOperand(0, NewOperand);
  }
  SmallVector<ValueAsMetadata *, 4> MDs;
  ValueAsMetadata *NewOperand = getAsMetadata(NewValue);
  for (auto *VMD : Locations)
    MDs.push_back(VMD == *OldIt ? NewOperand : getAsMetadata(VMD));
  setArgOperand(
      0, MetadataAsValue::get(getContext(), DIArgList::get(getContext(), MDs)));
}

void DbgVariableIntrinsic::replaceVariableLocationOp(unsigned OpIdx,
                                                     Value *NewValue) {
  assert(OpIdx < getNumVariableLocationOps() && "Invalid Operand Index");
  if (!hasArgList()) {
    Value *NewOperand = isa<MetadataAsValue>(NewValue)
                            ? NewValue
                            : MetadataAsValue::get(
                                  getContext(), ValueAsMetadata::get(NewValue));
    return setArgOperand(0, NewOperand);
  }
  SmallVector<ValueAsMetadata *, 4> MDs;
  ValueAsMetadata *NewOperand = getAsMetadata(NewValue);
  for (unsigned Idx = 0; Idx < getNumVariableLocationOps(); ++Idx)
    MDs.push_back(Idx == OpIdx ? NewOperand
                               : getAsMetadata(getVariableLocationOp(Idx)));
  setArgOperand(
      0, MetadataAsValue::get(getContext(), DIArgList::get(getContext(), MDs)));
}

void DbgVariableIntrinsic::addVariableLocationOps(ArrayRef<Value *> NewValues,
                                                  DIExpression *NewExpr) {
  assert(NewExpr->hasAllLocationOps(getNumVariableLocationOps() +
                                    NewValues.size()) &&
         "NewExpr for debug variable intrinsic does not reference every "
         "location operand.");
  assert(!is_contained(NewValues, nullptr) && "New values must be non-null");
  setArgOperand(2, MetadataAsValue::get(getContext(), NewExpr));
  SmallVector<ValueAsMetadata *, 4> MDs;
  for (auto *VMD : location_ops())
    MDs.push_back(getAsMetadata(VMD));
  for (auto *VMD : NewValues)
    MDs.push_back(getAsMetadata(VMD));
  setArgOperand(
      0, MetadataAsValue::get(getContext(), DIArgList::get(getContext(), MDs)));
}

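/// Return the size in bits this intrinsic describes: the fragment size from
/// the DIExpression if a fragment is present, otherwise the size of the whole
/// variable.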
std::optional<uint64_t> DbgVariableIntrinsic::getFragmentSizeInBits() const {
  if (auto Fragment = getExpression()->getFragmentInfo())
    return Fragment->SizeInBits;
  return getVariable()->getSizeInBits();
}

Value *DbgAssignIntrinsic::getAddress() const {
  auto *MD = getRawAddress();
  if (auto *V = dyn_cast<ValueAsMetadata>(MD))
    return V->getValue();

  // When the value goes to null, it gets replaced by an empty MDNode.
  assert(!cast<MDNode>(MD)->getNumOperands() && "Expected an empty MDNode");
  return nullptr;
}

void DbgAssignIntrinsic::setAssignId(DIAssignID *New) {
  setOperand(OpAssignID, MetadataAsValue::get(getContext(), New));
}

void DbgAssignIntrinsic::setAddress(Value *V) {
  setOperand(OpAddress,
             MetadataAsValue::get(getContext(), ValueAsMetadata::get(V)));
}

void DbgAssignIntrinsic::setKillAddress() {
  if (isKillAddress())
    return;
  setAddress(UndefValue::get(getAddress()->getType()));
}

bool DbgAssignIntrinsic::isKillAddress() const {
  Value *Addr = getAddress();
  return !Addr || isa<UndefValue>(Addr);
}

void DbgAssignIntrinsic::setValue(Value *V) {
  setOperand(OpValue,
             MetadataAsValue::get(getContext(), ValueAsMetadata::get(V)));
}

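/// Find the entry of the sorted \p NameTable that is the longest
/// dot-separated prefix of \p Name (including an exact match). Returns the
/// index of that entry, or -1 if no table entry is a valid prefix of \p Name.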
int llvm::Intrinsic::lookupLLVMIntrinsicByName(ArrayRef<const char *> NameTable,
                                               StringRef Name) {
  assert(Name.starts_with("llvm.") && "Unexpected intrinsic prefix");

  // Do successive binary searches of the dotted name components. For
  // "llvm.gc.experimental.statepoint.p1i8.p1i32", we will find the range of
  // intrinsics starting with "llvm.gc", then "llvm.gc.experimental", then
  // "llvm.gc.experimental.statepoint", and then we will stop as the range is
  // size 1. During the search, we can skip the prefix that we already know is
  // identical. By using strncmp we consider names with differing suffixes to
  // be part of the equal range.
  size_t CmpEnd = 4; // Skip the "llvm" component.
  const char *const *Low = NameTable.begin();
  const char *const *High = NameTable.end();
  const char *const *LastLow = Low;
  while (CmpEnd < Name.size() && High - Low > 0) {
    size_t CmpStart = CmpEnd;
    CmpEnd = Name.find('.', CmpStart + 1);
    CmpEnd = CmpEnd == StringRef::npos ? Name.size() : CmpEnd;
    auto Cmp = [CmpStart, CmpEnd](const char *LHS, const char *RHS) {
      return strncmp(LHS + CmpStart, RHS + CmpStart, CmpEnd - CmpStart) < 0;
    };
    LastLow = Low;
    std::tie(Low, High) = std::equal_range(Low, High, Name.data(), Cmp);
  }
  if (High - Low > 0)
    LastLow = Low;

  if (LastLow == NameTable.end())
    return -1;
  StringRef NameFound = *LastLow;
  if (Name == NameFound ||
      (Name.starts_with(NameFound) && Name[NameFound.size()] == '.'))
    return LastLow - NameTable.begin();
  return -1;
}

ConstantInt *InstrProfCntrInstBase::getNumCounters() const {
  if (InstrProfValueProfileInst::classof(this))
    llvm_unreachable("InstrProfValueProfileInst does not have counters!");
  return cast<ConstantInt>(const_cast<Value *>(getArgOperand(2)));
}

ConstantInt *InstrProfCntrInstBase::getIndex() const {
  if (InstrProfValueProfileInst::classof(this))
    llvm_unreachable("Please use InstrProfValueProfileInst::getIndex()");
  return cast<ConstantInt>(const_cast<Value *>(getArgOperand(3)));
}

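/// Return the step of this increment: the explicit step operand for the
/// .step variant of the intrinsic, otherwise an implicit constant 1.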
Value *InstrProfIncrementInst::getStep() const {
  if (InstrProfIncrementInstStep::classof(this)) {
    return const_cast<Value *>(getArgOperand(4));
  }
  const Module *M = getModule();
  LLVMContext &Context = M->getContext();
  return ConstantInt::get(Type::getInt64Ty(Context), 1);
}

Value *InstrProfCallsite::getCallee() const {
  if (isa<InstrProfCallsite>(this))
    return getArgOperand(4);
  return nullptr;
}

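/// Constrained FP intrinsics carry their rounding mode and exception behavior
/// as MDString operands at the end of the argument list; the two accessors
/// below decode those strings and return std::nullopt if the operand is
/// missing or is not an MDString.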
std::optional<RoundingMode> ConstrainedFPIntrinsic::getRoundingMode() const {
  unsigned NumOperands = arg_size();
  Metadata *MD = nullptr;
  auto *MAV = dyn_cast<MetadataAsValue>(getArgOperand(NumOperands - 2));
  if (MAV)
    MD = MAV->getMetadata();
  if (!MD || !isa<MDString>(MD))
    return std::nullopt;
  return convertStrToRoundingMode(cast<MDString>(MD)->getString());
}

std::optional<fp::ExceptionBehavior>
ConstrainedFPIntrinsic::getExceptionBehavior() const {
  unsigned NumOperands = arg_size();
  Metadata *MD = nullptr;
  auto *MAV = dyn_cast<MetadataAsValue>(getArgOperand(NumOperands - 1));
  if (MAV)
    MD = MAV->getMetadata();
  if (!MD || !isa<MDString>(MD))
    return std::nullopt;
  return convertStrToExceptionBehavior(cast<MDString>(MD)->getString());
}

bool ConstrainedFPIntrinsic::isDefaultFPEnvironment() const {
  std::optional<fp::ExceptionBehavior> Except = getExceptionBehavior();
  if (Except) {
    if (*Except != fp::ebIgnore)
      return false;
  }

  std::optional<RoundingMode> Rounding = getRoundingMode();
  if (Rounding) {
    if (*Rounding != RoundingMode::NearestTiesToEven)
      return false;
  }

  return true;
}

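// Decode the floating-point comparison predicate that constrained fcmp and VP
// compare intrinsics pass as an MDString operand (e.g. "oeq", "ult").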
static FCmpInst::Predicate getFPPredicateFromMD(const Value *Op) {
  Metadata *MD = cast<MetadataAsValue>(Op)->getMetadata();
  if (!MD || !isa<MDString>(MD))
    return FCmpInst::BAD_FCMP_PREDICATE;
  return StringSwitch<FCmpInst::Predicate>(cast<MDString>(MD)->getString())
      .Case("oeq", FCmpInst::FCMP_OEQ)
      .Case("ogt", FCmpInst::FCMP_OGT)
      .Case("oge", FCmpInst::FCMP_OGE)
      .Case("olt", FCmpInst::FCMP_OLT)
      .Case("ole", FCmpInst::FCMP_OLE)
      .Case("one", FCmpInst::FCMP_ONE)
      .Case("ord", FCmpInst::FCMP_ORD)
      .Case("uno", FCmpInst::FCMP_UNO)
      .Case("ueq", FCmpInst::FCMP_UEQ)
      .Case("ugt", FCmpInst::FCMP_UGT)
      .Case("uge", FCmpInst::FCMP_UGE)
      .Case("ult", FCmpInst::FCMP_ULT)
      .Case("ule", FCmpInst::FCMP_ULE)
      .Case("une", FCmpInst::FCMP_UNE)
      .Default(FCmpInst::BAD_FCMP_PREDICATE);
}

FCmpInst::Predicate ConstrainedFPCmpIntrinsic::getPredicate() const {
  return getFPPredicateFromMD(getArgOperand(2));
}

unsigned ConstrainedFPIntrinsic::getNonMetadataArgCount() const {
  // All constrained fp intrinsics have "fpexcept" metadata.
  unsigned NumArgs = arg_size() - 1;

  // Some intrinsics have "round" metadata.
  if (Intrinsic::hasConstrainedFPRoundingModeOperand(getIntrinsicID()))
    NumArgs -= 1;

  // Compare intrinsics take their predicate as metadata.
  if (isa<ConstrainedFPCmpIntrinsic>(this))
    NumArgs -= 1;

  return NumArgs;
}

bool ConstrainedFPIntrinsic::classof(const IntrinsicInst *I) {
  return Intrinsic::isConstrainedFPIntrinsic(I->getIntrinsicID());
}

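/// Return the number of lanes this VP operation works on, taken from the mask
/// operand's vector type, or from the result type for vp.merge/vp.select,
/// which carry no mask.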
ElementCount VPIntrinsic::getStaticVectorLength() const {
  auto GetVectorLengthOfType = [](const Type *T) -> ElementCount {
    const auto *VT = cast<VectorType>(T);
    auto ElemCount = VT->getElementCount();
    return ElemCount;
  };

  Value *VPMask = getMaskParam();
  if (!VPMask) {
    assert((getIntrinsicID() == Intrinsic::vp_merge ||
            getIntrinsicID() == Intrinsic::vp_select) &&
           "Unexpected VP intrinsic without mask operand");
    return GetVectorLengthOfType(getType());
  }
  return GetVectorLengthOfType(VPMask->getType());
}

Value *VPIntrinsic::getMaskParam() const {
  if (auto MaskPos = getMaskParamPos(getIntrinsicID()))
    return getArgOperand(*MaskPos);
  return nullptr;
}

void VPIntrinsic::setMaskParam(Value *NewMask) {
  auto MaskPos = getMaskParamPos(getIntrinsicID());
  setArgOperand(*MaskPos, NewMask);
}

Value *VPIntrinsic::getVectorLengthParam() const {
  if (auto EVLPos = getVectorLengthParamPos(getIntrinsicID()))
    return getArgOperand(*EVLPos);
  return nullptr;
}

void VPIntrinsic::setVectorLengthParam(Value *NewEVL) {
  auto EVLPos = getVectorLengthParamPos(getIntrinsicID());
  setArgOperand(*EVLPos, NewEVL);
}

std::optional<unsigned>
VPIntrinsic::getMaskParamPos(Intrinsic::ID IntrinsicID) {
  switch (IntrinsicID) {
  default:
    return std::nullopt;

#define BEGIN_REGISTER_VP_INTRINSIC(VPID, MASKPOS, VLENPOS)                    \
  case Intrinsic::VPID:                                                        \
    return MASKPOS;
#include "llvm/IR/VPIntrinsics.def"
  }
}

std::optional<unsigned>
VPIntrinsic::getVectorLengthParamPos(Intrinsic::ID IntrinsicID) {
  switch (IntrinsicID) {
  default:
    return std::nullopt;

#define BEGIN_REGISTER_VP_INTRINSIC(VPID, MASKPOS, VLENPOS)                    \
  case Intrinsic::VPID:                                                        \
    return VLENPOS;
#include "llvm/IR/VPIntrinsics.def"
  }
}

/// \return the alignment of the pointer used by this load/store/gather or
/// scatter.
MaybeAlign VPIntrinsic::getPointerAlignment() const {
  std::optional<unsigned> PtrParamOpt =
      getMemoryPointerParamPos(getIntrinsicID());
  assert(PtrParamOpt && "no pointer argument!");
  return getParamAlign(*PtrParamOpt);
}

/// \return The pointer operand of this load, store, gather or scatter.
Value *VPIntrinsic::getMemoryPointerParam() const {
  if (auto PtrParamOpt = getMemoryPointerParamPos(getIntrinsicID()))
    return getArgOperand(*PtrParamOpt);
  return nullptr;
}

std::optional<unsigned>
VPIntrinsic::getMemoryPointerParamPos(Intrinsic::ID VPID) {
  switch (VPID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_MEMOP(POINTERPOS, ...) return POINTERPOS;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return std::nullopt;
}

/// \return The data (payload) operand of this store or scatter.
Value *VPIntrinsic::getMemoryDataParam() const {
  auto DataParamOpt = getMemoryDataParamPos(getIntrinsicID());
  if (!DataParamOpt)
    return nullptr;
  return getArgOperand(*DataParamOpt);
}

std::optional<unsigned> VPIntrinsic::getMemoryDataParamPos(Intrinsic::ID VPID) {
  switch (VPID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_MEMOP(POINTERPOS, DATAPOS) return DATAPOS;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return std::nullopt;
}

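// Several of the helpers below are file-local constexpr counterparts of the
// VPIntrinsic queries so that the per-intrinsic static_asserts further down
// can evaluate them at compile time; each is generated by expanding the
// property macros from VPIntrinsics.def.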
constexpr bool isVPIntrinsic(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, MASKPOS, VLENPOS)                    \
  case Intrinsic::VPID:                                                        \
    return true;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}

bool VPIntrinsic::isVPIntrinsic(Intrinsic::ID ID) {
  return ::isVPIntrinsic(ID);
}

// Equivalent non-predicated opcode
constexpr static std::optional<unsigned>
getFunctionalOpcodeForVP(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_FUNCTIONAL_OPC(OPC) return Instruction::OPC;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return std::nullopt;
}

std::optional<unsigned>
VPIntrinsic::getFunctionalOpcodeForVP(Intrinsic::ID ID) {
  return ::getFunctionalOpcodeForVP(ID);
}

// Equivalent non-predicated intrinsic ID
constexpr static std::optional<Intrinsic::ID>
getFunctionalIntrinsicIDForVP(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_FUNCTIONAL_INTRINSIC(INTRIN) return Intrinsic::INTRIN;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return std::nullopt;
}

std::optional<Intrinsic::ID>
VPIntrinsic::getFunctionalIntrinsicIDForVP(Intrinsic::ID ID) {
  return ::getFunctionalIntrinsicIDForVP(ID);
}

constexpr static bool doesVPHaveNoFunctionalEquivalent(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_NO_FUNCTIONAL return true;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}

// All VP intrinsics should have an equivalent non-VP opcode or intrinsic
// defined, or be marked that they don't have one.
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...)                                 \
  static_assert(doesVPHaveNoFunctionalEquivalent(Intrinsic::VPID) ||           \
                getFunctionalOpcodeForVP(Intrinsic::VPID) ||                   \
                getFunctionalIntrinsicIDForVP(Intrinsic::VPID));
#include "llvm/IR/VPIntrinsics.def"

// Equivalent non-predicated constrained intrinsic
std::optional<Intrinsic::ID>
VPIntrinsic::getConstrainedIntrinsicIDForVP(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_CONSTRAINEDFP(HASRND, HASEXCEPT, CID) return Intrinsic::CID;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return std::nullopt;
}

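/// Map an IR opcode to the VP intrinsic that implements the same operation,
/// or Intrinsic::not_intrinsic if there is none.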
Intrinsic::ID VPIntrinsic::getForOpcode(unsigned IROPC) {
  switch (IROPC) {
  default:
    break;

#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) break;
#define VP_PROPERTY_FUNCTIONAL_OPC(OPC) case Instruction::OPC:
#define END_REGISTER_VP_INTRINSIC(VPID) return Intrinsic::VPID;
#include "llvm/IR/VPIntrinsics.def"
  }
  return Intrinsic::not_intrinsic;
}

constexpr static Intrinsic::ID getForIntrinsic(Intrinsic::ID Id) {
  if (::isVPIntrinsic(Id))
    return Id;

  switch (Id) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) break;
#define VP_PROPERTY_FUNCTIONAL_INTRINSIC(INTRIN) case Intrinsic::INTRIN:
#define END_REGISTER_VP_INTRINSIC(VPID) return Intrinsic::VPID;
#include "llvm/IR/VPIntrinsics.def"
  }
  return Intrinsic::not_intrinsic;
}

Intrinsic::ID VPIntrinsic::getForIntrinsic(Intrinsic::ID Id) {
  return ::getForIntrinsic(Id);
}

bool VPIntrinsic::canIgnoreVectorLengthParam() const {
  using namespace PatternMatch;

  ElementCount EC = getStaticVectorLength();

  // No vlen param - no lanes masked-off by it.
  auto *VLParam = getVectorLengthParam();
  if (!VLParam)
    return true;

  // Note that the VP intrinsic causes undefined behavior if the Explicit
  // Vector Length parameter is strictly greater-than the number of vector
  // elements of the operation. This function returns true when this is
  // detected statically in the IR.

  // Check whether "W == vscale * EC.getKnownMinValue()"
  if (EC.isScalable()) {
    // Compare vscale patterns
    uint64_t VScaleFactor;
    if (match(VLParam, m_Mul(m_VScale(), m_ConstantInt(VScaleFactor))))
      return VScaleFactor >= EC.getKnownMinValue();
    return (EC.getKnownMinValue() == 1) && match(VLParam, m_VScale());
  }

  // Standard SIMD operation.
  const auto *VLConst = dyn_cast<ConstantInt>(VLParam);
  if (!VLConst)
    return false;

  uint64_t VLNum = VLConst->getZExtValue();
  if (VLNum >= EC.getKnownMinValue())
    return true;

  return false;
}

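/// Declare (or retrieve) the VP intrinsic \p VPID in module \p M with the
/// overload types implied by \p ReturnType and \p Params. Most intrinsics
/// overload on the first vector parameter; casts, memory operations, and a
/// few others need the explicit type lists handled in the switch below.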
Function *VPIntrinsic::getDeclarationForParams(Module *M, Intrinsic::ID VPID,
                                               Type *ReturnType,
                                               ArrayRef<Value *> Params) {
  assert(isVPIntrinsic(VPID) && "not a VP intrinsic");
  Function *VPFunc;
  switch (VPID) {
  default: {
    Type *OverloadTy = Params[0]->getType();
    if (VPReductionIntrinsic::isVPReduction(VPID))
      OverloadTy =
          Params[*VPReductionIntrinsic::getVectorParamPos(VPID)]->getType();

    VPFunc = Intrinsic::getDeclaration(M, VPID, OverloadTy);
    break;
  }
  case Intrinsic::vp_trunc:
  case Intrinsic::vp_sext:
  case Intrinsic::vp_zext:
  case Intrinsic::vp_fptoui:
  case Intrinsic::vp_fptosi:
  case Intrinsic::vp_uitofp:
  case Intrinsic::vp_sitofp:
  case Intrinsic::vp_fptrunc:
  case Intrinsic::vp_fpext:
  case Intrinsic::vp_ptrtoint:
  case Intrinsic::vp_inttoptr:
  case Intrinsic::vp_lrint:
  case Intrinsic::vp_llrint:
  case Intrinsic::vp_cttz_elts:
    VPFunc =
        Intrinsic::getDeclaration(M, VPID, {ReturnType, Params[0]->getType()});
    break;
  case Intrinsic::vp_is_fpclass:
    VPFunc = Intrinsic::getDeclaration(M, VPID, {Params[0]->getType()});
    break;
  case Intrinsic::vp_merge:
  case Intrinsic::vp_select:
    VPFunc = Intrinsic::getDeclaration(M, VPID, {Params[1]->getType()});
    break;
  case Intrinsic::vp_load:
    VPFunc = Intrinsic::getDeclaration(
        M, VPID, {ReturnType, Params[0]->getType()});
    break;
  case Intrinsic::experimental_vp_strided_load:
    VPFunc = Intrinsic::getDeclaration(
        M, VPID, {ReturnType, Params[0]->getType(), Params[1]->getType()});
    break;
  case Intrinsic::vp_gather:
    VPFunc = Intrinsic::getDeclaration(
        M, VPID, {ReturnType, Params[0]->getType()});
    break;
  case Intrinsic::vp_store:
    VPFunc = Intrinsic::getDeclaration(
        M, VPID, {Params[0]->getType(), Params[1]->getType()});
    break;
  case Intrinsic::experimental_vp_strided_store:
    VPFunc = Intrinsic::getDeclaration(
        M, VPID,
        {Params[0]->getType(), Params[1]->getType(), Params[2]->getType()});
    break;
  case Intrinsic::vp_scatter:
    VPFunc = Intrinsic::getDeclaration(
        M, VPID, {Params[0]->getType(), Params[1]->getType()});
    break;
  case Intrinsic::experimental_vp_splat:
    VPFunc = Intrinsic::getDeclaration(M, VPID, ReturnType);
    break;
  }
  assert(VPFunc && "Could not declare VP intrinsic");
  return VPFunc;
}

bool VPReductionIntrinsic::isVPReduction(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_REDUCTION(STARTPOS, ...) return true;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}

bool VPCastIntrinsic::isVPCast(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_CASTOP return true;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}

bool VPCmpIntrinsic::isVPCmp(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_CMP(CCPOS, ...) return true;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}

bool VPBinOpIntrinsic::isVPBinOp(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_BINARYOP return true;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}

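// Decode the integer comparison predicate that VP integer compare intrinsics
// pass as an MDString operand (e.g. "eq", "slt").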
static ICmpInst::Predicate getIntPredicateFromMD(const Value *Op) {
  Metadata *MD = cast<MetadataAsValue>(Op)->getMetadata();
  if (!MD || !isa<MDString>(MD))
    return ICmpInst::BAD_ICMP_PREDICATE;
  return StringSwitch<ICmpInst::Predicate>(cast<MDString>(MD)->getString())
      .Case("eq", ICmpInst::ICMP_EQ)
      .Case("ne", ICmpInst::ICMP_NE)
      .Case("ugt", ICmpInst::ICMP_UGT)
      .Case("uge", ICmpInst::ICMP_UGE)
      .Case("ult", ICmpInst::ICMP_ULT)
      .Case("ule", ICmpInst::ICMP_ULE)
      .Case("sgt", ICmpInst::ICMP_SGT)
      .Case("sge", ICmpInst::ICMP_SGE)
      .Case("slt", ICmpInst::ICMP_SLT)
      .Case("sle", ICmpInst::ICMP_SLE)
      .Default(ICmpInst::BAD_ICMP_PREDICATE);
}

CmpInst::Predicate VPCmpIntrinsic::getPredicate() const {
  bool IsFP = true;
  std::optional<unsigned> CCArgIdx;
  switch (getIntrinsicID()) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_CMP(CCPOS, ISFP)                                           \
  CCArgIdx = CCPOS;                                                            \
  IsFP = ISFP;                                                                 \
  break;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  assert(CCArgIdx && "Unexpected vector-predicated comparison");
  return IsFP ? getFPPredicateFromMD(getArgOperand(*CCArgIdx))
              : getIntPredicateFromMD(getArgOperand(*CCArgIdx));
}

unsigned VPReductionIntrinsic::getVectorParamPos() const {
  return *VPReductionIntrinsic::getVectorParamPos(getIntrinsicID());
}

unsigned VPReductionIntrinsic::getStartParamPos() const {
  return *VPReductionIntrinsic::getStartParamPos(getIntrinsicID());
}

std::optional<unsigned>
VPReductionIntrinsic::getVectorParamPos(Intrinsic::ID ID) {
  switch (ID) {
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_REDUCTION(STARTPOS, VECTORPOS) return VECTORPOS;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  default:
    break;
  }
  return std::nullopt;
}

std::optional<unsigned>
VPReductionIntrinsic::getStartParamPos(Intrinsic::ID ID) {
  switch (ID) {
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_REDUCTION(STARTPOS, VECTORPOS) return STARTPOS;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  default:
    break;
  }
  return std::nullopt;
}

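/// Map an overflow-checking or saturating arithmetic intrinsic to the plain
/// binary opcode it corresponds to (Add, Sub or Mul).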
Instruction::BinaryOps BinaryOpIntrinsic::getBinaryOp() const {
  switch (getIntrinsicID()) {
  case Intrinsic::uadd_with_overflow:
  case Intrinsic::sadd_with_overflow:
  case Intrinsic::uadd_sat:
  case Intrinsic::sadd_sat:
    return Instruction::Add;
  case Intrinsic::usub_with_overflow:
  case Intrinsic::ssub_with_overflow:
  case Intrinsic::usub_sat:
  case Intrinsic::ssub_sat:
    return Instruction::Sub;
  case Intrinsic::umul_with_overflow:
  case Intrinsic::smul_with_overflow:
    return Instruction::Mul;
  default:
    llvm_unreachable("Invalid intrinsic");
  }
}

bool BinaryOpIntrinsic::isSigned() const {
  switch (getIntrinsicID()) {
  case Intrinsic::sadd_with_overflow:
  case Intrinsic::ssub_with_overflow:
  case Intrinsic::smul_with_overflow:
  case Intrinsic::sadd_sat:
  case Intrinsic::ssub_sat:
    return true;
  default:
    return false;
  }
}

unsigned BinaryOpIntrinsic::getNoWrapKind() const {
  if (isSigned())
    return OverflowingBinaryOperator::NoSignedWrap;
  else
    return OverflowingBinaryOperator::NoUnsignedWrap;
}

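/// Return the statepoint this projection (gc.relocate / gc.result) is tied
/// to. An undef or "none" token is propagated as undef; on the exceptional
/// path of an invoke, the statepoint is recovered from the landing pad's
/// unique predecessor.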
const Value *GCProjectionInst::getStatepoint() const {
  const Value *Token = getArgOperand(0);
  if (isa<UndefValue>(Token))
    return Token;

  // Treat none token as if it was undef here
  if (isa<ConstantTokenNone>(Token))
    return UndefValue::get(Token->getType());

  // This takes care both of relocates for call statepoints and relocates
  // on normal path of invoke statepoint.
  if (!isa<LandingPadInst>(Token))
    return cast<GCStatepointInst>(Token);

  // This relocate is on exceptional path of an invoke statepoint
  const BasicBlock *InvokeBB =
      cast<Instruction>(Token)->getParent()->getUniquePredecessor();

  assert(InvokeBB && "safepoints should have unique landingpads");
  assert(InvokeBB->getTerminator() &&
         "safepoint block should be well formed");

  return cast<GCStatepointInst>(InvokeBB->getTerminator());
}

Value *GCRelocateInst::getBasePtr() const {
  auto Statepoint = getStatepoint();
  if (isa<UndefValue>(Statepoint))
    return UndefValue::get(Statepoint->getType());

  auto *GCInst = cast<GCStatepointInst>(Statepoint);
  if (auto Opt = GCInst->getOperandBundle(LLVMContext::OB_gc_live))
    return *(Opt->Inputs.begin() + getBasePtrIndex());
  return *(GCInst->arg_begin() + getBasePtrIndex());
}

Value *GCRelocateInst::getDerivedPtr() const {
  auto *Statepoint = getStatepoint();
  if (isa<UndefValue>(Statepoint))
    return UndefValue::get(Statepoint->getType());

  auto *GCInst = cast<GCStatepointInst>(Statepoint);
  if (auto Opt = GCInst->getOperandBundle(LLVMContext::OB_gc_live))
    return *(Opt->Inputs.begin() + getDerivedPtrIndex());
  return *(GCInst->arg_begin() + getDerivedPtrIndex());
}