//===-- IntrinsicInst.cpp - Intrinsic Instruction Wrappers ---------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements methods that make it really easy to deal with intrinsic
// functions.
//
// All intrinsic function calls are instances of the call instruction, so these
// are all subclasses of the CallInst class. Note that none of these classes
// has state or virtual methods, which is an important part of this gross/neat
// hack working.
//
// In some cases, arguments to intrinsics need to be generic and are defined as
// type pointer to empty struct { }*. To access the real item of interest the
// cast instruction needs to be stripped away.
//
//===----------------------------------------------------------------------===//
22
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/ADT/StringSwitch.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Operator.h"
#include "llvm/IR/PatternMatch.h"
#include "llvm/IR/Statepoint.h"
#include <optional>

using namespace llvm;
35
36bool IntrinsicInst::mayLowerToFunctionCall(Intrinsic::ID IID) {
37 switch (IID) {
38 case Intrinsic::objc_autorelease:
39 case Intrinsic::objc_autoreleasePoolPop:
40 case Intrinsic::objc_autoreleasePoolPush:
41 case Intrinsic::objc_autoreleaseReturnValue:
42 case Intrinsic::objc_copyWeak:
43 case Intrinsic::objc_destroyWeak:
44 case Intrinsic::objc_initWeak:
45 case Intrinsic::objc_loadWeak:
46 case Intrinsic::objc_loadWeakRetained:
47 case Intrinsic::objc_moveWeak:
48 case Intrinsic::objc_release:
49 case Intrinsic::objc_retain:
50 case Intrinsic::objc_retainAutorelease:
51 case Intrinsic::objc_retainAutoreleaseReturnValue:
52 case Intrinsic::objc_retainAutoreleasedReturnValue:
53 case Intrinsic::objc_retainBlock:
54 case Intrinsic::objc_storeStrong:
55 case Intrinsic::objc_storeWeak:
56 case Intrinsic::objc_unsafeClaimAutoreleasedReturnValue:
57 case Intrinsic::objc_retainedObject:
58 case Intrinsic::objc_unretainedObject:
59 case Intrinsic::objc_unretainedPointer:
60 case Intrinsic::objc_retain_autorelease:
61 case Intrinsic::objc_sync_enter:
62 case Intrinsic::objc_sync_exit:
63 return true;
64 default:
65 return false;
66 }
67}
68
//===----------------------------------------------------------------------===//
/// DbgVariableIntrinsic - This is the common base class for debug info
/// intrinsics for variables.
///
73
74iterator_range<location_op_iterator> RawLocationWrapper::location_ops() const {
75 Metadata *MD = getRawLocation();
76 assert(MD && "First operand of DbgVariableIntrinsic should be non-null.");
77 // If operand is ValueAsMetadata, return a range over just that operand.
78 if (auto *VAM = dyn_cast<ValueAsMetadata>(Val: MD)) {
79 return {location_op_iterator(VAM), location_op_iterator(VAM + 1)};
80 }
81 // If operand is DIArgList, return a range over its args.
82 if (auto *AL = dyn_cast<DIArgList>(Val: MD))
83 return {location_op_iterator(AL->args_begin()),
84 location_op_iterator(AL->args_end())};
85 // Operand must be an empty metadata tuple, so return empty iterator.
86 return {location_op_iterator(static_cast<ValueAsMetadata *>(nullptr)),
87 location_op_iterator(static_cast<ValueAsMetadata *>(nullptr))};
88}
89
90iterator_range<location_op_iterator>
91DbgVariableIntrinsic::location_ops() const {
92 return getWrappedLocation().location_ops();
93}
94
95Value *DbgVariableIntrinsic::getVariableLocationOp(unsigned OpIdx) const {
96 return getWrappedLocation().getVariableLocationOp(OpIdx);
97}
98
99Value *RawLocationWrapper::getVariableLocationOp(unsigned OpIdx) const {
100 Metadata *MD = getRawLocation();
101 assert(MD && "First operand of DbgVariableIntrinsic should be non-null.");
102 if (auto *AL = dyn_cast<DIArgList>(Val: MD))
103 return AL->getArgs()[OpIdx]->getValue();
104 if (isa<MDNode>(Val: MD))
105 return nullptr;
106 assert(
107 isa<ValueAsMetadata>(MD) &&
108 "Attempted to get location operand from DbgVariableIntrinsic with none.");
109 auto *V = cast<ValueAsMetadata>(Val: MD);
110 assert(OpIdx == 0 && "Operand Index must be 0 for a debug intrinsic with a "
111 "single location operand.");
112 return V->getValue();
113}
114
115static ValueAsMetadata *getAsMetadata(Value *V) {
116 return isa<MetadataAsValue>(Val: V) ? dyn_cast<ValueAsMetadata>(
117 Val: cast<MetadataAsValue>(Val: V)->getMetadata())
118 : ValueAsMetadata::get(V);
119}
120
121void DbgVariableIntrinsic::replaceVariableLocationOp(Value *OldValue,
122 Value *NewValue,
123 bool AllowEmpty) {
124 // If OldValue is used as the address part of a dbg.assign intrinsic replace
125 // it with NewValue and return true.
126 auto ReplaceDbgAssignAddress = [this, OldValue, NewValue]() -> bool {
127 auto *DAI = dyn_cast<DbgAssignIntrinsic>(Val: this);
128 if (!DAI || OldValue != DAI->getAddress())
129 return false;
130 DAI->setAddress(NewValue);
131 return true;
132 };
133 bool DbgAssignAddrReplaced = ReplaceDbgAssignAddress();
134 (void)DbgAssignAddrReplaced;
135
136 assert(NewValue && "Values must be non-null");
137 auto Locations = location_ops();
138 auto OldIt = find(Range&: Locations, Val: OldValue);
139 if (OldIt == Locations.end()) {
140 if (AllowEmpty || DbgAssignAddrReplaced)
141 return;
142 assert(DbgAssignAddrReplaced &&
143 "OldValue must be dbg.assign addr if unused in DIArgList");
144 return;
145 }
146
147 assert(OldIt != Locations.end() && "OldValue must be a current location");
148 if (!hasArgList()) {
149 Value *NewOperand = isa<MetadataAsValue>(Val: NewValue)
150 ? NewValue
151 : MetadataAsValue::get(
152 Context&: getContext(), MD: ValueAsMetadata::get(V: NewValue));
153 return setArgOperand(i: 0, v: NewOperand);
154 }
155 SmallVector<ValueAsMetadata *, 4> MDs;
156 ValueAsMetadata *NewOperand = getAsMetadata(V: NewValue);
157 for (auto *VMD : Locations)
158 MDs.push_back(Elt: VMD == *OldIt ? NewOperand : getAsMetadata(V: VMD));
159 setArgOperand(
160 i: 0, v: MetadataAsValue::get(Context&: getContext(), MD: DIArgList::get(Context&: getContext(), Args: MDs)));
161}
162void DbgVariableIntrinsic::replaceVariableLocationOp(unsigned OpIdx,
163 Value *NewValue) {
164 assert(OpIdx < getNumVariableLocationOps() && "Invalid Operand Index");
165 if (!hasArgList()) {
166 Value *NewOperand = isa<MetadataAsValue>(Val: NewValue)
167 ? NewValue
168 : MetadataAsValue::get(
169 Context&: getContext(), MD: ValueAsMetadata::get(V: NewValue));
170 return setArgOperand(i: 0, v: NewOperand);
171 }
172 SmallVector<ValueAsMetadata *, 4> MDs;
173 ValueAsMetadata *NewOperand = getAsMetadata(V: NewValue);
174 for (unsigned Idx = 0; Idx < getNumVariableLocationOps(); ++Idx)
175 MDs.push_back(Elt: Idx == OpIdx ? NewOperand
176 : getAsMetadata(V: getVariableLocationOp(OpIdx: Idx)));
177 setArgOperand(
178 i: 0, v: MetadataAsValue::get(Context&: getContext(), MD: DIArgList::get(Context&: getContext(), Args: MDs)));
179}
180
181void DbgVariableIntrinsic::addVariableLocationOps(ArrayRef<Value *> NewValues,
182 DIExpression *NewExpr) {
183 assert(NewExpr->hasAllLocationOps(getNumVariableLocationOps() +
184 NewValues.size()) &&
185 "NewExpr for debug variable intrinsic does not reference every "
186 "location operand.");
187 assert(!is_contained(NewValues, nullptr) && "New values must be non-null");
188 setArgOperand(i: 2, v: MetadataAsValue::get(Context&: getContext(), MD: NewExpr));
189 SmallVector<ValueAsMetadata *, 4> MDs;
190 for (auto *VMD : location_ops())
191 MDs.push_back(Elt: getAsMetadata(V: VMD));
192 for (auto *VMD : NewValues)
193 MDs.push_back(Elt: getAsMetadata(V: VMD));
194 setArgOperand(
195 i: 0, v: MetadataAsValue::get(Context&: getContext(), MD: DIArgList::get(Context&: getContext(), Args: MDs)));
196}
197
198std::optional<uint64_t> DbgVariableIntrinsic::getFragmentSizeInBits() const {
199 if (auto Fragment = getExpression()->getFragmentInfo())
200 return Fragment->SizeInBits;
201 return getVariable()->getSizeInBits();
202}
203
204Value *DbgAssignIntrinsic::getAddress() const {
205 auto *MD = getRawAddress();
206 if (auto *V = dyn_cast<ValueAsMetadata>(Val: MD))
207 return V->getValue();
208
209 // When the value goes to null, it gets replaced by an empty MDNode.
210 assert(!cast<MDNode>(MD)->getNumOperands() && "Expected an empty MDNode");
211 return nullptr;
212}
213
214void DbgAssignIntrinsic::setAssignId(DIAssignID *New) {
215 setOperand(i: OpAssignID, v: MetadataAsValue::get(Context&: getContext(), MD: New));
216}
217
218void DbgAssignIntrinsic::setAddress(Value *V) {
219 setOperand(i: OpAddress,
220 v: MetadataAsValue::get(Context&: getContext(), MD: ValueAsMetadata::get(V)));
221}
222
223void DbgAssignIntrinsic::setKillAddress() {
224 if (isKillAddress())
225 return;
226 setAddress(PoisonValue::get(T: getAddress()->getType()));
227}
228
229bool DbgAssignIntrinsic::isKillAddress() const {
230 Value *Addr = getAddress();
231 return !Addr || isa<UndefValue>(Val: Addr);
232}
233
234void DbgAssignIntrinsic::setValue(Value *V) {
235 setOperand(i: OpValue,
236 v: MetadataAsValue::get(Context&: getContext(), MD: ValueAsMetadata::get(V)));
237}
238
239ConstantInt *InstrProfCntrInstBase::getNumCounters() const {
240 if (InstrProfValueProfileInst::classof(I: this))
241 llvm_unreachable("InstrProfValueProfileInst does not have counters!");
242 return cast<ConstantInt>(Val: getArgOperand(i: 2));
243}
244
245ConstantInt *InstrProfCntrInstBase::getIndex() const {
246 if (InstrProfValueProfileInst::classof(I: this))
247 llvm_unreachable("Please use InstrProfValueProfileInst::getIndex()");
248 return cast<ConstantInt>(Val: getArgOperand(i: 3));
249}
250
251void InstrProfCntrInstBase::setIndex(uint32_t Idx) {
252 assert(isa<InstrProfCntrInstBase>(this));
253 setArgOperand(i: 3, v: ConstantInt::get(Ty: Type::getInt32Ty(C&: getContext()), V: Idx));
254}
255
256Value *InstrProfIncrementInst::getStep() const {
257 if (InstrProfIncrementInstStep::classof(I: this)) {
258 return getArgOperand(i: 4);
259 }
260 const Module *M = getModule();
261 LLVMContext &Context = M->getContext();
262 return ConstantInt::get(Ty: Type::getInt64Ty(C&: Context), V: 1);
263}
264
265Value *InstrProfCallsite::getCallee() const {
266 if (isa<InstrProfCallsite>(Val: this))
267 return getArgOperand(i: 4);
268 return nullptr;
269}
270
271void InstrProfCallsite::setCallee(Value *Callee) {
272 assert(isa<InstrProfCallsite>(this));
273 setArgOperand(i: 4, v: Callee);
274}
275
276std::optional<RoundingMode> ConstrainedFPIntrinsic::getRoundingMode() const {
277 unsigned NumOperands = arg_size();
278 Metadata *MD = nullptr;
279 auto *MAV = dyn_cast<MetadataAsValue>(Val: getArgOperand(i: NumOperands - 2));
280 if (MAV)
281 MD = MAV->getMetadata();
282 if (!MD || !isa<MDString>(Val: MD))
283 return std::nullopt;
284 return convertStrToRoundingMode(cast<MDString>(Val: MD)->getString());
285}
286
287std::optional<fp::ExceptionBehavior>
288ConstrainedFPIntrinsic::getExceptionBehavior() const {
289 unsigned NumOperands = arg_size();
290 Metadata *MD = nullptr;
291 auto *MAV = dyn_cast<MetadataAsValue>(Val: getArgOperand(i: NumOperands - 1));
292 if (MAV)
293 MD = MAV->getMetadata();
294 if (!MD || !isa<MDString>(Val: MD))
295 return std::nullopt;
296 return convertStrToExceptionBehavior(cast<MDString>(Val: MD)->getString());
297}
298
299bool ConstrainedFPIntrinsic::isDefaultFPEnvironment() const {
300 std::optional<fp::ExceptionBehavior> Except = getExceptionBehavior();
301 if (Except) {
302 if (*Except != fp::ebIgnore)
303 return false;
304 }
305
306 std::optional<RoundingMode> Rounding = getRoundingMode();
307 if (Rounding) {
308 if (*Rounding != RoundingMode::NearestTiesToEven)
309 return false;
310 }
311
312 return true;
313}
314
315static FCmpInst::Predicate getFPPredicateFromMD(const Value *Op) {
316 Metadata *MD = cast<MetadataAsValue>(Val: Op)->getMetadata();
317 if (!MD || !isa<MDString>(Val: MD))
318 return FCmpInst::BAD_FCMP_PREDICATE;
319 return StringSwitch<FCmpInst::Predicate>(cast<MDString>(Val: MD)->getString())
320 .Case(S: "oeq", Value: FCmpInst::FCMP_OEQ)
321 .Case(S: "ogt", Value: FCmpInst::FCMP_OGT)
322 .Case(S: "oge", Value: FCmpInst::FCMP_OGE)
323 .Case(S: "olt", Value: FCmpInst::FCMP_OLT)
324 .Case(S: "ole", Value: FCmpInst::FCMP_OLE)
325 .Case(S: "one", Value: FCmpInst::FCMP_ONE)
326 .Case(S: "ord", Value: FCmpInst::FCMP_ORD)
327 .Case(S: "uno", Value: FCmpInst::FCMP_UNO)
328 .Case(S: "ueq", Value: FCmpInst::FCMP_UEQ)
329 .Case(S: "ugt", Value: FCmpInst::FCMP_UGT)
330 .Case(S: "uge", Value: FCmpInst::FCMP_UGE)
331 .Case(S: "ult", Value: FCmpInst::FCMP_ULT)
332 .Case(S: "ule", Value: FCmpInst::FCMP_ULE)
333 .Case(S: "une", Value: FCmpInst::FCMP_UNE)
334 .Default(Value: FCmpInst::BAD_FCMP_PREDICATE);
335}
336
337FCmpInst::Predicate ConstrainedFPCmpIntrinsic::getPredicate() const {
338 return getFPPredicateFromMD(Op: getArgOperand(i: 2));
339}
340
341unsigned ConstrainedFPIntrinsic::getNonMetadataArgCount() const {
342 // All constrained fp intrinsics have "fpexcept" metadata.
343 unsigned NumArgs = arg_size() - 1;
344
345 // Some intrinsics have "round" metadata.
346 if (Intrinsic::hasConstrainedFPRoundingModeOperand(QID: getIntrinsicID()))
347 NumArgs -= 1;
348
349 // Compare intrinsics take their predicate as metadata.
350 if (isa<ConstrainedFPCmpIntrinsic>(Val: this))
351 NumArgs -= 1;
352
353 return NumArgs;
354}
355
356bool ConstrainedFPIntrinsic::classof(const IntrinsicInst *I) {
357 return Intrinsic::isConstrainedFPIntrinsic(QID: I->getIntrinsicID());
358}
359
360ElementCount VPIntrinsic::getStaticVectorLength() const {
361 auto GetVectorLengthOfType = [](const Type *T) -> ElementCount {
362 const auto *VT = cast<VectorType>(Val: T);
363 auto ElemCount = VT->getElementCount();
364 return ElemCount;
365 };
366
367 Value *VPMask = getMaskParam();
368 if (!VPMask) {
369 assert((getIntrinsicID() == Intrinsic::vp_merge ||
370 getIntrinsicID() == Intrinsic::vp_select) &&
371 "Unexpected VP intrinsic without mask operand");
372 return GetVectorLengthOfType(getType());
373 }
374 return GetVectorLengthOfType(VPMask->getType());
375}
376
377Value *VPIntrinsic::getMaskParam() const {
378 if (auto MaskPos = getMaskParamPos(IntrinsicID: getIntrinsicID()))
379 return getArgOperand(i: *MaskPos);
380 return nullptr;
381}
382
383void VPIntrinsic::setMaskParam(Value *NewMask) {
384 auto MaskPos = getMaskParamPos(IntrinsicID: getIntrinsicID());
385 setArgOperand(i: *MaskPos, v: NewMask);
386}
387
388Value *VPIntrinsic::getVectorLengthParam() const {
389 if (auto EVLPos = getVectorLengthParamPos(IntrinsicID: getIntrinsicID()))
390 return getArgOperand(i: *EVLPos);
391 return nullptr;
392}
393
394void VPIntrinsic::setVectorLengthParam(Value *NewEVL) {
395 auto EVLPos = getVectorLengthParamPos(IntrinsicID: getIntrinsicID());
396 setArgOperand(i: *EVLPos, v: NewEVL);
397}
398
399std::optional<unsigned>
400VPIntrinsic::getMaskParamPos(Intrinsic::ID IntrinsicID) {
401 switch (IntrinsicID) {
402 default:
403 return std::nullopt;
404
405#define BEGIN_REGISTER_VP_INTRINSIC(VPID, MASKPOS, VLENPOS) \
406 case Intrinsic::VPID: \
407 return MASKPOS;
408#include "llvm/IR/VPIntrinsics.def"
409 }
410}
411
412std::optional<unsigned>
413VPIntrinsic::getVectorLengthParamPos(Intrinsic::ID IntrinsicID) {
414 switch (IntrinsicID) {
415 default:
416 return std::nullopt;
417
418#define BEGIN_REGISTER_VP_INTRINSIC(VPID, MASKPOS, VLENPOS) \
419 case Intrinsic::VPID: \
420 return VLENPOS;
421#include "llvm/IR/VPIntrinsics.def"
422 }
423}
424
425/// \return the alignment of the pointer used by this load/store/gather or
426/// scatter.
427MaybeAlign VPIntrinsic::getPointerAlignment() const {
428 std::optional<unsigned> PtrParamOpt =
429 getMemoryPointerParamPos(getIntrinsicID());
430 assert(PtrParamOpt && "no pointer argument!");
431 return getParamAlign(ArgNo: *PtrParamOpt);
432}
433
434/// \return The pointer operand of this load,store, gather or scatter.
435Value *VPIntrinsic::getMemoryPointerParam() const {
436 if (auto PtrParamOpt = getMemoryPointerParamPos(getIntrinsicID()))
437 return getArgOperand(i: *PtrParamOpt);
438 return nullptr;
439}
440
441std::optional<unsigned>
442VPIntrinsic::getMemoryPointerParamPos(Intrinsic::ID VPID) {
443 switch (VPID) {
444 default:
445 return std::nullopt;
446 case Intrinsic::vp_store:
447 case Intrinsic::vp_scatter:
448 case Intrinsic::experimental_vp_strided_store:
449 return 1;
450 case Intrinsic::vp_load:
451 case Intrinsic::vp_load_ff:
452 case Intrinsic::vp_gather:
453 case Intrinsic::experimental_vp_strided_load:
454 return 0;
455 }
456}
457
458/// \return The data (payload) operand of this store or scatter.
459Value *VPIntrinsic::getMemoryDataParam() const {
460 auto DataParamOpt = getMemoryDataParamPos(getIntrinsicID());
461 if (!DataParamOpt)
462 return nullptr;
463 return getArgOperand(i: *DataParamOpt);
464}
465
466std::optional<unsigned> VPIntrinsic::getMemoryDataParamPos(Intrinsic::ID VPID) {
467 switch (VPID) {
468 default:
469 return std::nullopt;
470 case Intrinsic::vp_store:
471 case Intrinsic::vp_scatter:
472 case Intrinsic::experimental_vp_strided_store:
473 return 0;
474 }
475}
476
477constexpr bool isVPIntrinsic(Intrinsic::ID ID) {
478 switch (ID) {
479 default:
480 break;
481#define BEGIN_REGISTER_VP_INTRINSIC(VPID, MASKPOS, VLENPOS) \
482 case Intrinsic::VPID: \
483 return true;
484#include "llvm/IR/VPIntrinsics.def"
485 }
486 return false;
487}
488
489bool VPIntrinsic::isVPIntrinsic(Intrinsic::ID ID) {
490 return ::isVPIntrinsic(ID);
491}
492
493// Equivalent non-predicated opcode
494constexpr static std::optional<unsigned>
495getFunctionalOpcodeForVP(Intrinsic::ID ID) {
496 switch (ID) {
497 default:
498 break;
499#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
500#define VP_PROPERTY_FUNCTIONAL_OPC(OPC) return Instruction::OPC;
501#define END_REGISTER_VP_INTRINSIC(VPID) break;
502#include "llvm/IR/VPIntrinsics.def"
503 }
504 return std::nullopt;
505}
506
507std::optional<unsigned>
508VPIntrinsic::getFunctionalOpcodeForVP(Intrinsic::ID ID) {
509 return ::getFunctionalOpcodeForVP(ID);
510}
511
512// Equivalent non-predicated intrinsic ID
513constexpr static std::optional<Intrinsic::ID>
514getFunctionalIntrinsicIDForVP(Intrinsic::ID ID) {
515 switch (ID) {
516 default:
517 break;
518#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
519#define VP_PROPERTY_FUNCTIONAL_INTRINSIC(INTRIN) return Intrinsic::INTRIN;
520#define END_REGISTER_VP_INTRINSIC(VPID) break;
521#include "llvm/IR/VPIntrinsics.def"
522 }
523 return std::nullopt;
524}
525
526std::optional<Intrinsic::ID>
527VPIntrinsic::getFunctionalIntrinsicIDForVP(Intrinsic::ID ID) {
528 return ::getFunctionalIntrinsicIDForVP(ID);
529}
530
531constexpr static bool doesVPHaveNoFunctionalEquivalent(Intrinsic::ID ID) {
532 switch (ID) {
533 default:
534 break;
535#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
536#define VP_PROPERTY_NO_FUNCTIONAL return true;
537#define END_REGISTER_VP_INTRINSIC(VPID) break;
538#include "llvm/IR/VPIntrinsics.def"
539 }
540 return false;
541}
542
543// All VP intrinsics should have an equivalent non-VP opcode or intrinsic
544// defined, or be marked that they don't have one.
545#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) \
546 static_assert(doesVPHaveNoFunctionalEquivalent(Intrinsic::VPID) || \
547 getFunctionalOpcodeForVP(Intrinsic::VPID) || \
548 getFunctionalIntrinsicIDForVP(Intrinsic::VPID));
549#include "llvm/IR/VPIntrinsics.def"
550
551// Equivalent non-predicated constrained intrinsic
552std::optional<Intrinsic::ID>
553VPIntrinsic::getConstrainedIntrinsicIDForVP(Intrinsic::ID ID) {
554 switch (ID) {
555 default:
556 break;
557#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
558#define VP_PROPERTY_CONSTRAINEDFP(CID) return Intrinsic::CID;
559#define END_REGISTER_VP_INTRINSIC(VPID) break;
560#include "llvm/IR/VPIntrinsics.def"
561 }
562 return std::nullopt;
563}
564
565Intrinsic::ID VPIntrinsic::getForOpcode(unsigned IROPC) {
566 switch (IROPC) {
567 default:
568 break;
569
570#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) break;
571#define VP_PROPERTY_FUNCTIONAL_OPC(OPC) case Instruction::OPC:
572#define END_REGISTER_VP_INTRINSIC(VPID) return Intrinsic::VPID;
573#include "llvm/IR/VPIntrinsics.def"
574 }
575 return Intrinsic::not_intrinsic;
576}
577
578constexpr static Intrinsic::ID getForIntrinsic(Intrinsic::ID Id) {
579 if (::isVPIntrinsic(ID: Id))
580 return Id;
581
582 switch (Id) {
583 default:
584 break;
585#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) break;
586#define VP_PROPERTY_FUNCTIONAL_INTRINSIC(INTRIN) case Intrinsic::INTRIN:
587#define END_REGISTER_VP_INTRINSIC(VPID) return Intrinsic::VPID;
588#include "llvm/IR/VPIntrinsics.def"
589 }
590 return Intrinsic::not_intrinsic;
591}
592
593Intrinsic::ID VPIntrinsic::getForIntrinsic(Intrinsic::ID Id) {
594 return ::getForIntrinsic(Id);
595}
596
597bool VPIntrinsic::canIgnoreVectorLengthParam() const {
598 using namespace PatternMatch;
599
600 ElementCount EC = getStaticVectorLength();
601
602 // No vlen param - no lanes masked-off by it.
603 auto *VLParam = getVectorLengthParam();
604 if (!VLParam)
605 return true;
606
607 // Note that the VP intrinsic causes undefined behavior if the Explicit Vector
608 // Length parameter is strictly greater-than the number of vector elements of
609 // the operation. This function returns true when this is detected statically
610 // in the IR.
611
612 // Check whether "W == vscale * EC.getKnownMinValue()"
613 if (EC.isScalable()) {
614 // Compare vscale patterns
615 uint64_t VScaleFactor;
616 if (match(V: VLParam, P: m_Mul(L: m_VScale(), R: m_ConstantInt(V&: VScaleFactor))))
617 return VScaleFactor >= EC.getKnownMinValue();
618 return (EC.getKnownMinValue() == 1) && match(V: VLParam, P: m_VScale());
619 }
620
621 // standard SIMD operation
622 const auto *VLConst = dyn_cast<ConstantInt>(Val: VLParam);
623 if (!VLConst)
624 return false;
625
626 uint64_t VLNum = VLConst->getZExtValue();
627 if (VLNum >= EC.getKnownMinValue())
628 return true;
629
630 return false;
631}
632
633Function *VPIntrinsic::getOrInsertDeclarationForParams(
634 Module *M, Intrinsic::ID VPID, Type *ReturnType, ArrayRef<Value *> Params) {
635 assert(isVPIntrinsic(VPID) && "not a VP intrinsic");
636 Function *VPFunc;
637 switch (VPID) {
638 default: {
639 Type *OverloadTy = Params[0]->getType();
640 if (VPReductionIntrinsic::isVPReduction(ID: VPID))
641 OverloadTy =
642 Params[*VPReductionIntrinsic::getVectorParamPos(ID: VPID)]->getType();
643
644 VPFunc = Intrinsic::getOrInsertDeclaration(M, id: VPID, Tys: OverloadTy);
645 break;
646 }
647 case Intrinsic::vp_trunc:
648 case Intrinsic::vp_sext:
649 case Intrinsic::vp_zext:
650 case Intrinsic::vp_fptoui:
651 case Intrinsic::vp_fptosi:
652 case Intrinsic::vp_uitofp:
653 case Intrinsic::vp_sitofp:
654 case Intrinsic::vp_fptrunc:
655 case Intrinsic::vp_fpext:
656 case Intrinsic::vp_ptrtoint:
657 case Intrinsic::vp_inttoptr:
658 case Intrinsic::vp_lrint:
659 case Intrinsic::vp_llrint:
660 case Intrinsic::vp_cttz_elts:
661 VPFunc = Intrinsic::getOrInsertDeclaration(
662 M, id: VPID, Tys: {ReturnType, Params[0]->getType()});
663 break;
664 case Intrinsic::vp_is_fpclass:
665 VPFunc = Intrinsic::getOrInsertDeclaration(M, id: VPID, Tys: {Params[0]->getType()});
666 break;
667 case Intrinsic::vp_merge:
668 case Intrinsic::vp_select:
669 VPFunc = Intrinsic::getOrInsertDeclaration(M, id: VPID, Tys: {Params[1]->getType()});
670 break;
671 case Intrinsic::vp_load:
672 VPFunc = Intrinsic::getOrInsertDeclaration(
673 M, id: VPID, Tys: {ReturnType, Params[0]->getType()});
674 break;
675 case Intrinsic::vp_load_ff:
676 VPFunc = Intrinsic::getOrInsertDeclaration(
677 M, id: VPID, Tys: {ReturnType->getStructElementType(N: 0), Params[0]->getType()});
678 break;
679 case Intrinsic::experimental_vp_strided_load:
680 VPFunc = Intrinsic::getOrInsertDeclaration(
681 M, id: VPID, Tys: {ReturnType, Params[0]->getType(), Params[1]->getType()});
682 break;
683 case Intrinsic::vp_gather:
684 VPFunc = Intrinsic::getOrInsertDeclaration(
685 M, id: VPID, Tys: {ReturnType, Params[0]->getType()});
686 break;
687 case Intrinsic::vp_store:
688 VPFunc = Intrinsic::getOrInsertDeclaration(
689 M, id: VPID, Tys: {Params[0]->getType(), Params[1]->getType()});
690 break;
691 case Intrinsic::experimental_vp_strided_store:
692 VPFunc = Intrinsic::getOrInsertDeclaration(
693 M, id: VPID,
694 Tys: {Params[0]->getType(), Params[1]->getType(), Params[2]->getType()});
695 break;
696 case Intrinsic::vp_scatter:
697 VPFunc = Intrinsic::getOrInsertDeclaration(
698 M, id: VPID, Tys: {Params[0]->getType(), Params[1]->getType()});
699 break;
700 }
701 assert(VPFunc && "Could not declare VP intrinsic");
702 return VPFunc;
703}
704
705bool VPReductionIntrinsic::isVPReduction(Intrinsic::ID ID) {
706 switch (ID) {
707 case Intrinsic::vp_reduce_add:
708 case Intrinsic::vp_reduce_mul:
709 case Intrinsic::vp_reduce_and:
710 case Intrinsic::vp_reduce_or:
711 case Intrinsic::vp_reduce_xor:
712 case Intrinsic::vp_reduce_smax:
713 case Intrinsic::vp_reduce_smin:
714 case Intrinsic::vp_reduce_umax:
715 case Intrinsic::vp_reduce_umin:
716 case Intrinsic::vp_reduce_fmax:
717 case Intrinsic::vp_reduce_fmin:
718 case Intrinsic::vp_reduce_fmaximum:
719 case Intrinsic::vp_reduce_fminimum:
720 case Intrinsic::vp_reduce_fadd:
721 case Intrinsic::vp_reduce_fmul:
722 return true;
723 default:
724 return false;
725 }
726}
727
728bool VPCastIntrinsic::isVPCast(Intrinsic::ID ID) {
729 // All of the vp.casts correspond to instructions
730 if (std::optional<unsigned> Opc = getFunctionalOpcodeForVP(ID))
731 return Instruction::isCast(Opcode: *Opc);
732 return false;
733}
734
735bool VPCmpIntrinsic::isVPCmp(Intrinsic::ID ID) {
736 switch (ID) {
737 default:
738 return false;
739 case Intrinsic::vp_fcmp:
740 case Intrinsic::vp_icmp:
741 return true;
742 }
743}
744
745bool VPBinOpIntrinsic::isVPBinOp(Intrinsic::ID ID) {
746 switch (ID) {
747 default:
748 break;
749#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
750#define VP_PROPERTY_BINARYOP return true;
751#define END_REGISTER_VP_INTRINSIC(VPID) break;
752#include "llvm/IR/VPIntrinsics.def"
753 }
754 return false;
755}
756
757static ICmpInst::Predicate getIntPredicateFromMD(const Value *Op) {
758 Metadata *MD = cast<MetadataAsValue>(Val: Op)->getMetadata();
759 if (!MD || !isa<MDString>(Val: MD))
760 return ICmpInst::BAD_ICMP_PREDICATE;
761 return StringSwitch<ICmpInst::Predicate>(cast<MDString>(Val: MD)->getString())
762 .Case(S: "eq", Value: ICmpInst::ICMP_EQ)
763 .Case(S: "ne", Value: ICmpInst::ICMP_NE)
764 .Case(S: "ugt", Value: ICmpInst::ICMP_UGT)
765 .Case(S: "uge", Value: ICmpInst::ICMP_UGE)
766 .Case(S: "ult", Value: ICmpInst::ICMP_ULT)
767 .Case(S: "ule", Value: ICmpInst::ICMP_ULE)
768 .Case(S: "sgt", Value: ICmpInst::ICMP_SGT)
769 .Case(S: "sge", Value: ICmpInst::ICMP_SGE)
770 .Case(S: "slt", Value: ICmpInst::ICMP_SLT)
771 .Case(S: "sle", Value: ICmpInst::ICMP_SLE)
772 .Default(Value: ICmpInst::BAD_ICMP_PREDICATE);
773}
774
775CmpInst::Predicate VPCmpIntrinsic::getPredicate() const {
776 assert(isVPCmp(getIntrinsicID()));
777 return getIntrinsicID() == Intrinsic::vp_fcmp
778 ? getFPPredicateFromMD(Op: getArgOperand(i: 2))
779 : getIntPredicateFromMD(Op: getArgOperand(i: 2));
780}
781
782unsigned VPReductionIntrinsic::getVectorParamPos() const {
783 return *VPReductionIntrinsic::getVectorParamPos(ID: getIntrinsicID());
784}
785
786unsigned VPReductionIntrinsic::getStartParamPos() const {
787 return *VPReductionIntrinsic::getStartParamPos(ID: getIntrinsicID());
788}
789
790std::optional<unsigned>
791VPReductionIntrinsic::getVectorParamPos(Intrinsic::ID ID) {
792 if (isVPReduction(ID))
793 return 1;
794 return std::nullopt;
795}
796
797std::optional<unsigned>
798VPReductionIntrinsic::getStartParamPos(Intrinsic::ID ID) {
799 if (isVPReduction(ID))
800 return 0;
801 return std::nullopt;
802}
803
804Instruction::BinaryOps BinaryOpIntrinsic::getBinaryOp() const {
805 switch (getIntrinsicID()) {
806 case Intrinsic::uadd_with_overflow:
807 case Intrinsic::sadd_with_overflow:
808 case Intrinsic::uadd_sat:
809 case Intrinsic::sadd_sat:
810 return Instruction::Add;
811 case Intrinsic::usub_with_overflow:
812 case Intrinsic::ssub_with_overflow:
813 case Intrinsic::usub_sat:
814 case Intrinsic::ssub_sat:
815 return Instruction::Sub;
816 case Intrinsic::umul_with_overflow:
817 case Intrinsic::smul_with_overflow:
818 return Instruction::Mul;
819 default:
820 llvm_unreachable("Invalid intrinsic");
821 }
822}
823
824bool BinaryOpIntrinsic::isSigned() const {
825 switch (getIntrinsicID()) {
826 case Intrinsic::sadd_with_overflow:
827 case Intrinsic::ssub_with_overflow:
828 case Intrinsic::smul_with_overflow:
829 case Intrinsic::sadd_sat:
830 case Intrinsic::ssub_sat:
831 return true;
832 default:
833 return false;
834 }
835}
836
837unsigned BinaryOpIntrinsic::getNoWrapKind() const {
838 if (isSigned())
839 return OverflowingBinaryOperator::NoSignedWrap;
840 else
841 return OverflowingBinaryOperator::NoUnsignedWrap;
842}
843
844const Value *GCProjectionInst::getStatepoint() const {
845 const Value *Token = getArgOperand(i: 0);
846 if (isa<UndefValue>(Val: Token))
847 return Token;
848
849 // Treat none token as if it was undef here
850 if (isa<ConstantTokenNone>(Val: Token))
851 return UndefValue::get(T: Token->getType());
852
853 // This takes care both of relocates for call statepoints and relocates
854 // on normal path of invoke statepoint.
855 if (!isa<LandingPadInst>(Val: Token))
856 return cast<GCStatepointInst>(Val: Token);
857
858 // This relocate is on exceptional path of an invoke statepoint
859 const BasicBlock *InvokeBB =
860 cast<Instruction>(Val: Token)->getParent()->getUniquePredecessor();
861
862 assert(InvokeBB && "safepoints should have unique landingpads");
863 assert(InvokeBB->getTerminator() &&
864 "safepoint block should be well formed");
865
866 return cast<GCStatepointInst>(Val: InvokeBB->getTerminator());
867}
868
869Value *GCRelocateInst::getBasePtr() const {
870 auto Statepoint = getStatepoint();
871 if (isa<UndefValue>(Val: Statepoint))
872 return UndefValue::get(T: Statepoint->getType());
873
874 auto *GCInst = cast<GCStatepointInst>(Val: Statepoint);
875 if (auto Opt = GCInst->getOperandBundle(ID: LLVMContext::OB_gc_live))
876 return *(Opt->Inputs.begin() + getBasePtrIndex());
877 return *(GCInst->arg_begin() + getBasePtrIndex());
878}
879
880Value *GCRelocateInst::getDerivedPtr() const {
881 auto *Statepoint = getStatepoint();
882 if (isa<UndefValue>(Val: Statepoint))
883 return UndefValue::get(T: Statepoint->getType());
884
885 auto *GCInst = cast<GCStatepointInst>(Val: Statepoint);
886 if (auto Opt = GCInst->getOperandBundle(ID: LLVMContext::OB_gc_live))
887 return *(Opt->Inputs.begin() + getDerivedPtrIndex());
888 return *(GCInst->arg_begin() + getDerivedPtrIndex());
889}
890
891ConvergenceControlInst *ConvergenceControlInst::CreateAnchor(BasicBlock &BB) {
892 Module *M = BB.getModule();
893 Function *Fn = Intrinsic::getOrInsertDeclaration(
894 M, id: llvm::Intrinsic::experimental_convergence_anchor);
895 auto *Call = CallInst::Create(Func: Fn, NameStr: "", InsertBefore: BB.getFirstInsertionPt());
896 return cast<ConvergenceControlInst>(Val: Call);
897}
898
899ConvergenceControlInst *ConvergenceControlInst::CreateEntry(BasicBlock &BB) {
900 Module *M = BB.getModule();
901 Function *Fn = Intrinsic::getOrInsertDeclaration(
902 M, id: llvm::Intrinsic::experimental_convergence_entry);
903 auto *Call = CallInst::Create(Func: Fn, NameStr: "", InsertBefore: BB.getFirstInsertionPt());
904 return cast<ConvergenceControlInst>(Val: Call);
905}
906
907ConvergenceControlInst *
908ConvergenceControlInst::CreateLoop(BasicBlock &BB,
909 ConvergenceControlInst *ParentToken) {
910 Module *M = BB.getModule();
911 Function *Fn = Intrinsic::getOrInsertDeclaration(
912 M, id: llvm::Intrinsic::experimental_convergence_loop);
913 llvm::Value *BundleArgs[] = {ParentToken};
914 llvm::OperandBundleDef OB("convergencectrl", BundleArgs);
915 auto *Call = CallInst::Create(Func: Fn, Args: {}, Bundles: {OB}, NameStr: "", InsertBefore: BB.getFirstInsertionPt());
916 return cast<ConvergenceControlInst>(Val: Call);
917}
918