1//===-- IntrinsicInst.cpp - Intrinsic Instruction Wrappers ---------------===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8//
9// This file implements methods that make it really easy to deal with intrinsic
10// functions.
11//
12// All intrinsic function calls are instances of the call instruction, so these
13// are all subclasses of the CallInst class. Note that none of these classes
14// has state or virtual methods, which is an important part of this gross/neat
15// hack working.
16//
17// In some cases, arguments to intrinsics need to be generic and are defined as
18// type pointer to empty struct { }*. To access the real item of interest the
19// cast instruction needs to be stripped away.
20//
21//===----------------------------------------------------------------------===//
22
23#include "llvm/IR/IntrinsicInst.h"
24#include "llvm/ADT/StringSwitch.h"
25#include "llvm/IR/Constants.h"
26#include "llvm/IR/DebugInfoMetadata.h"
27#include "llvm/IR/Metadata.h"
28#include "llvm/IR/Module.h"
29#include "llvm/IR/Operator.h"
30#include "llvm/IR/PatternMatch.h"
31#include "llvm/IR/Statepoint.h"
32#include <optional>
33
34using namespace llvm;
35
// Returns true if the intrinsic \p IID may be lowered to an actual runtime
// function call (the ObjC ARC and synchronization entry points), meaning it
// cannot be treated as a "free" intrinsic by analyses such as inlining cost.
bool IntrinsicInst::mayLowerToFunctionCall(Intrinsic::ID IID) {
  switch (IID) {
  case Intrinsic::objc_autorelease:
  case Intrinsic::objc_autoreleasePoolPop:
  case Intrinsic::objc_autoreleasePoolPush:
  case Intrinsic::objc_autoreleaseReturnValue:
  case Intrinsic::objc_claimAutoreleasedReturnValue:
  case Intrinsic::objc_copyWeak:
  case Intrinsic::objc_destroyWeak:
  case Intrinsic::objc_initWeak:
  case Intrinsic::objc_loadWeak:
  case Intrinsic::objc_loadWeakRetained:
  case Intrinsic::objc_moveWeak:
  case Intrinsic::objc_release:
  case Intrinsic::objc_retain:
  case Intrinsic::objc_retainAutorelease:
  case Intrinsic::objc_retainAutoreleaseReturnValue:
  case Intrinsic::objc_retainAutoreleasedReturnValue:
  case Intrinsic::objc_retainBlock:
  case Intrinsic::objc_storeStrong:
  case Intrinsic::objc_storeWeak:
  case Intrinsic::objc_unsafeClaimAutoreleasedReturnValue:
  case Intrinsic::objc_retainedObject:
  case Intrinsic::objc_unretainedObject:
  case Intrinsic::objc_unretainedPointer:
  case Intrinsic::objc_retain_autorelease:
  case Intrinsic::objc_sync_enter:
  case Intrinsic::objc_sync_exit:
    return true;
  default:
    // All other intrinsics are assumed not to become real calls.
    return false;
  }
}
69
70//===----------------------------------------------------------------------===//
71/// DbgVariableIntrinsic - This is the common base class for debug info
72/// intrinsics for variables.
73///
74
/// Return an iterator range over the SSA values referenced by the raw debug
/// location, which may be encoded as a single ValueAsMetadata, a DIArgList,
/// or an empty MDNode (no locations).
iterator_range<location_op_iterator> RawLocationWrapper::location_ops() const {
  Metadata *MD = getRawLocation();
  assert(MD && "First operand of DbgVariableIntrinsic should be non-null.");
  // If operand is ValueAsMetadata, return a range over just that operand.
  if (auto *VAM = dyn_cast<ValueAsMetadata>(Val: MD)) {
    // NOTE: VAM + 1 is only used as a one-past-the-end sentinel; it is never
    // dereferenced.
    return {location_op_iterator(VAM), location_op_iterator(VAM + 1)};
  }
  // If operand is DIArgList, return a range over its args.
  if (auto *AL = dyn_cast<DIArgList>(Val: MD))
    return {location_op_iterator(AL->args_begin()),
            location_op_iterator(AL->args_end())};
  // Operand must be an empty metadata tuple, so return empty iterator.
  return {location_op_iterator(static_cast<ValueAsMetadata *>(nullptr)),
          location_op_iterator(static_cast<ValueAsMetadata *>(nullptr))};
}
90
91iterator_range<location_op_iterator>
92DbgVariableIntrinsic::location_ops() const {
93 return getWrappedLocation().location_ops();
94}
95
96Value *DbgVariableIntrinsic::getVariableLocationOp(unsigned OpIdx) const {
97 return getWrappedLocation().getVariableLocationOp(OpIdx);
98}
99
/// Return the \p OpIdx'th location operand, or nullptr when the raw location
/// is an empty metadata tuple (i.e. the intrinsic has no locations).
Value *RawLocationWrapper::getVariableLocationOp(unsigned OpIdx) const {
  Metadata *MD = getRawLocation();
  assert(MD && "First operand of DbgVariableIntrinsic should be non-null.");
  // Multi-location form: index directly into the DIArgList.
  if (auto *AL = dyn_cast<DIArgList>(Val: MD))
    return AL->getArgs()[OpIdx]->getValue();
  // An MDNode here means the empty tuple — no location operands at all.
  // (This check must come after the DIArgList check.)
  if (isa<MDNode>(Val: MD))
    return nullptr;
  assert(
      isa<ValueAsMetadata>(MD) &&
      "Attempted to get location operand from DbgVariableIntrinsic with none.");
  auto *V = cast<ValueAsMetadata>(Val: MD);
  assert(OpIdx == 0 && "Operand Index must be 0 for a debug intrinsic with a "
                       "single location operand.");
  return V->getValue();
}
115
116static ValueAsMetadata *getAsMetadata(Value *V) {
117 return isa<MetadataAsValue>(Val: V) ? dyn_cast<ValueAsMetadata>(
118 Val: cast<MetadataAsValue>(Val: V)->getMetadata())
119 : ValueAsMetadata::get(V);
120}
121
/// Replace every use of \p OldValue in this intrinsic's location operands
/// (and, for dbg.assign, in its address operand) with \p NewValue. When
/// \p AllowEmpty is true it is not an error for \p OldValue to be absent.
void DbgVariableIntrinsic::replaceVariableLocationOp(Value *OldValue,
                                                     Value *NewValue,
                                                     bool AllowEmpty) {
  // If OldValue is used as the address part of a dbg.assign intrinsic replace
  // it with NewValue and return true.
  auto ReplaceDbgAssignAddress = [this, OldValue, NewValue]() -> bool {
    auto *DAI = dyn_cast<DbgAssignIntrinsic>(Val: this);
    if (!DAI || OldValue != DAI->getAddress())
      return false;
    DAI->setAddress(NewValue);
    return true;
  };
  bool DbgAssignAddrReplaced = ReplaceDbgAssignAddress();
  // Only read inside asserts below; silence unused-variable in release.
  (void)DbgAssignAddrReplaced;

  assert(NewValue && "Values must be non-null");
  auto Locations = location_ops();
  auto OldIt = find(Range&: Locations, Val: OldValue);
  if (OldIt == Locations.end()) {
    if (AllowEmpty || DbgAssignAddrReplaced)
      return;
    // DbgAssignAddrReplaced is known false here, so this assert always fires
    // (in asserting builds) to diagnose a caller error.
    assert(DbgAssignAddrReplaced &&
           "OldValue must be dbg.assign addr if unused in DIArgList");
    return;
  }

  assert(OldIt != Locations.end() && "OldValue must be a current location");
  if (!hasArgList()) {
    // Single-location form: swap the entire raw location operand, wrapping
    // NewValue as metadata if it is not already wrapped.
    Value *NewOperand = isa<MetadataAsValue>(Val: NewValue)
                            ? NewValue
                            : MetadataAsValue::get(
                                  Context&: getContext(), MD: ValueAsMetadata::get(V: NewValue));
    return setArgOperand(i: 0, v: NewOperand);
  }
  // DIArgList form: rebuild the list, substituting NewValue for every entry
  // equal to the found operand.
  SmallVector<ValueAsMetadata *, 4> MDs;
  ValueAsMetadata *NewOperand = getAsMetadata(V: NewValue);
  for (auto *VMD : Locations)
    MDs.push_back(Elt: VMD == *OldIt ? NewOperand : getAsMetadata(V: VMD));
  setArgOperand(
      i: 0, v: MetadataAsValue::get(Context&: getContext(), MD: DIArgList::get(Context&: getContext(), Args: MDs)));
}
/// Replace the variable location operand at index \p OpIdx with \p NewValue.
void DbgVariableIntrinsic::replaceVariableLocationOp(unsigned OpIdx,
                                                     Value *NewValue) {
  assert(OpIdx < getNumVariableLocationOps() && "Invalid Operand Index");
  if (!hasArgList()) {
    // Single-location form: replace the whole raw location operand.
    Value *NewOperand = isa<MetadataAsValue>(Val: NewValue)
                            ? NewValue
                            : MetadataAsValue::get(
                                  Context&: getContext(), MD: ValueAsMetadata::get(V: NewValue));
    return setArgOperand(i: 0, v: NewOperand);
  }
  // DIArgList form: rebuild the list with the OpIdx'th entry substituted.
  SmallVector<ValueAsMetadata *, 4> MDs;
  ValueAsMetadata *NewOperand = getAsMetadata(V: NewValue);
  for (unsigned Idx = 0; Idx < getNumVariableLocationOps(); ++Idx)
    MDs.push_back(Elt: Idx == OpIdx ? NewOperand
                                 : getAsMetadata(V: getVariableLocationOp(OpIdx: Idx)));
  setArgOperand(
      i: 0, v: MetadataAsValue::get(Context&: getContext(), MD: DIArgList::get(Context&: getContext(), Args: MDs)));
}
181
/// Append \p NewValues to the location operand list and install \p NewExpr,
/// which must reference every location operand (existing plus new).
void DbgVariableIntrinsic::addVariableLocationOps(ArrayRef<Value *> NewValues,
                                                  DIExpression *NewExpr) {
  assert(NewExpr->hasAllLocationOps(getNumVariableLocationOps() +
                                    NewValues.size()) &&
         "NewExpr for debug variable intrinsic does not reference every "
         "location operand.");
  assert(!is_contained(NewValues, nullptr) && "New values must be non-null");
  // Operand 2 holds the DIExpression; it is updated together with the
  // location list so the two stay consistent.
  setArgOperand(i: 2, v: MetadataAsValue::get(Context&: getContext(), MD: NewExpr));
  SmallVector<ValueAsMetadata *, 4> MDs;
  for (auto *VMD : location_ops())
    MDs.push_back(Elt: getAsMetadata(V: VMD));
  for (auto *VMD : NewValues)
    MDs.push_back(Elt: getAsMetadata(V: VMD));
  setArgOperand(
      i: 0, v: MetadataAsValue::get(Context&: getContext(), MD: DIArgList::get(Context&: getContext(), Args: MDs)));
}
198
199std::optional<uint64_t> DbgVariableIntrinsic::getFragmentSizeInBits() const {
200 if (auto Fragment = getExpression()->getFragmentInfo())
201 return Fragment->SizeInBits;
202 return getVariable()->getSizeInBits();
203}
204
205Value *DbgAssignIntrinsic::getAddress() const {
206 auto *MD = getRawAddress();
207 if (auto *V = dyn_cast<ValueAsMetadata>(Val: MD))
208 return V->getValue();
209
210 // When the value goes to null, it gets replaced by an empty MDNode.
211 assert(!cast<MDNode>(MD)->getNumOperands() && "Expected an empty MDNode");
212 return nullptr;
213}
214
215void DbgAssignIntrinsic::setAssignId(DIAssignID *New) {
216 setOperand(i: OpAssignID, v: MetadataAsValue::get(Context&: getContext(), MD: New));
217}
218
219void DbgAssignIntrinsic::setAddress(Value *V) {
220 setOperand(i: OpAddress,
221 v: MetadataAsValue::get(Context&: getContext(), MD: ValueAsMetadata::get(V)));
222}
223
224void DbgAssignIntrinsic::setKillAddress() {
225 if (isKillAddress())
226 return;
227 setAddress(PoisonValue::get(T: getAddress()->getType()));
228}
229
230bool DbgAssignIntrinsic::isKillAddress() const {
231 Value *Addr = getAddress();
232 return !Addr || isa<UndefValue>(Val: Addr);
233}
234
235void DbgAssignIntrinsic::setValue(Value *V) {
236 setOperand(i: OpValue,
237 v: MetadataAsValue::get(Context&: getContext(), MD: ValueAsMetadata::get(V)));
238}
239
240ConstantInt *InstrProfCntrInstBase::getNumCounters() const {
241 if (InstrProfValueProfileInst::classof(I: this))
242 llvm_unreachable("InstrProfValueProfileInst does not have counters!");
243 return cast<ConstantInt>(Val: getArgOperand(i: 2));
244}
245
246ConstantInt *InstrProfCntrInstBase::getIndex() const {
247 if (InstrProfValueProfileInst::classof(I: this))
248 llvm_unreachable("Please use InstrProfValueProfileInst::getIndex()");
249 return cast<ConstantInt>(Val: getArgOperand(i: 3));
250}
251
252void InstrProfCntrInstBase::setIndex(uint32_t Idx) {
253 assert(isa<InstrProfCntrInstBase>(this));
254 setArgOperand(i: 3, v: ConstantInt::get(Ty: Type::getInt32Ty(C&: getContext()), V: Idx));
255}
256
257Value *InstrProfIncrementInst::getStep() const {
258 if (InstrProfIncrementInstStep::classof(I: this)) {
259 return getArgOperand(i: 4);
260 }
261 const Module *M = getModule();
262 LLVMContext &Context = M->getContext();
263 return ConstantInt::get(Ty: Type::getInt64Ty(C&: Context), V: 1);
264}
265
266Value *InstrProfCallsite::getCallee() const {
267 if (isa<InstrProfCallsite>(Val: this))
268 return getArgOperand(i: 4);
269 return nullptr;
270}
271
272void InstrProfCallsite::setCallee(Value *Callee) {
273 assert(isa<InstrProfCallsite>(this));
274 setArgOperand(i: 4, v: Callee);
275}
276
277std::optional<RoundingMode> ConstrainedFPIntrinsic::getRoundingMode() const {
278 unsigned NumOperands = arg_size();
279 Metadata *MD = nullptr;
280 auto *MAV = dyn_cast<MetadataAsValue>(Val: getArgOperand(i: NumOperands - 2));
281 if (MAV)
282 MD = MAV->getMetadata();
283 if (!MD || !isa<MDString>(Val: MD))
284 return std::nullopt;
285 return convertStrToRoundingMode(cast<MDString>(Val: MD)->getString());
286}
287
288std::optional<fp::ExceptionBehavior>
289ConstrainedFPIntrinsic::getExceptionBehavior() const {
290 unsigned NumOperands = arg_size();
291 Metadata *MD = nullptr;
292 auto *MAV = dyn_cast<MetadataAsValue>(Val: getArgOperand(i: NumOperands - 1));
293 if (MAV)
294 MD = MAV->getMetadata();
295 if (!MD || !isa<MDString>(Val: MD))
296 return std::nullopt;
297 return convertStrToExceptionBehavior(cast<MDString>(Val: MD)->getString());
298}
299
300bool ConstrainedFPIntrinsic::isDefaultFPEnvironment() const {
301 std::optional<fp::ExceptionBehavior> Except = getExceptionBehavior();
302 if (Except) {
303 if (*Except != fp::ebIgnore)
304 return false;
305 }
306
307 std::optional<RoundingMode> Rounding = getRoundingMode();
308 if (Rounding) {
309 if (*Rounding != RoundingMode::NearestTiesToEven)
310 return false;
311 }
312
313 return true;
314}
315
/// Decode an fcmp predicate stored as an MDString operand (e.g. "oeq").
/// Returns BAD_FCMP_PREDICATE for missing or unrecognised strings.
static FCmpInst::Predicate getFPPredicateFromMD(const Value *Op) {
  Metadata *MD = cast<MetadataAsValue>(Val: Op)->getMetadata();
  if (!MD || !isa<MDString>(Val: MD))
    return FCmpInst::BAD_FCMP_PREDICATE;
  return StringSwitch<FCmpInst::Predicate>(cast<MDString>(Val: MD)->getString())
      .Case(S: "oeq", Value: FCmpInst::FCMP_OEQ)
      .Case(S: "ogt", Value: FCmpInst::FCMP_OGT)
      .Case(S: "oge", Value: FCmpInst::FCMP_OGE)
      .Case(S: "olt", Value: FCmpInst::FCMP_OLT)
      .Case(S: "ole", Value: FCmpInst::FCMP_OLE)
      .Case(S: "one", Value: FCmpInst::FCMP_ONE)
      .Case(S: "ord", Value: FCmpInst::FCMP_ORD)
      .Case(S: "uno", Value: FCmpInst::FCMP_UNO)
      .Case(S: "ueq", Value: FCmpInst::FCMP_UEQ)
      .Case(S: "ugt", Value: FCmpInst::FCMP_UGT)
      .Case(S: "uge", Value: FCmpInst::FCMP_UGE)
      .Case(S: "ult", Value: FCmpInst::FCMP_ULT)
      .Case(S: "ule", Value: FCmpInst::FCMP_ULE)
      .Case(S: "une", Value: FCmpInst::FCMP_UNE)
      .Default(Value: FCmpInst::BAD_FCMP_PREDICATE);
}
337
338FCmpInst::Predicate ConstrainedFPCmpIntrinsic::getPredicate() const {
339 return getFPPredicateFromMD(Op: getArgOperand(i: 2));
340}
341
342unsigned ConstrainedFPIntrinsic::getNonMetadataArgCount() const {
343 // All constrained fp intrinsics have "fpexcept" metadata.
344 unsigned NumArgs = arg_size() - 1;
345
346 // Some intrinsics have "round" metadata.
347 if (Intrinsic::hasConstrainedFPRoundingModeOperand(QID: getIntrinsicID()))
348 NumArgs -= 1;
349
350 // Compare intrinsics take their predicate as metadata.
351 if (isa<ConstrainedFPCmpIntrinsic>(Val: this))
352 NumArgs -= 1;
353
354 return NumArgs;
355}
356
357bool ConstrainedFPIntrinsic::classof(const IntrinsicInst *I) {
358 return Intrinsic::isConstrainedFPIntrinsic(QID: I->getIntrinsicID());
359}
360
361ElementCount VPIntrinsic::getStaticVectorLength() const {
362 auto GetVectorLengthOfType = [](const Type *T) -> ElementCount {
363 const auto *VT = cast<VectorType>(Val: T);
364 auto ElemCount = VT->getElementCount();
365 return ElemCount;
366 };
367
368 Value *VPMask = getMaskParam();
369 if (!VPMask) {
370 assert((getIntrinsicID() == Intrinsic::vp_merge ||
371 getIntrinsicID() == Intrinsic::vp_select) &&
372 "Unexpected VP intrinsic without mask operand");
373 return GetVectorLengthOfType(getType());
374 }
375 return GetVectorLengthOfType(VPMask->getType());
376}
377
378Value *VPIntrinsic::getMaskParam() const {
379 if (auto MaskPos = getMaskParamPos(IntrinsicID: getIntrinsicID()))
380 return getArgOperand(i: *MaskPos);
381 return nullptr;
382}
383
384void VPIntrinsic::setMaskParam(Value *NewMask) {
385 auto MaskPos = getMaskParamPos(IntrinsicID: getIntrinsicID());
386 setArgOperand(i: *MaskPos, v: NewMask);
387}
388
389Value *VPIntrinsic::getVectorLengthParam() const {
390 if (auto EVLPos = getVectorLengthParamPos(IntrinsicID: getIntrinsicID()))
391 return getArgOperand(i: *EVLPos);
392 return nullptr;
393}
394
395void VPIntrinsic::setVectorLengthParam(Value *NewEVL) {
396 auto EVLPos = getVectorLengthParamPos(IntrinsicID: getIntrinsicID());
397 setArgOperand(i: *EVLPos, v: NewEVL);
398}
399
/// Return the operand index of the mask for VP intrinsic \p IntrinsicID, or
/// std::nullopt for non-VP (or mask-less) intrinsics. The case list is
/// generated from the VP intrinsic registry.
std::optional<unsigned>
VPIntrinsic::getMaskParamPos(Intrinsic::ID IntrinsicID) {
  switch (IntrinsicID) {
  default:
    return std::nullopt;

#define BEGIN_REGISTER_VP_INTRINSIC(VPID, MASKPOS, VLENPOS) \
  case Intrinsic::VPID: \
    return MASKPOS;
#include "llvm/IR/VPIntrinsics.def"
  }
}
412
/// Return the operand index of the explicit vector length for VP intrinsic
/// \p IntrinsicID, or std::nullopt for non-VP intrinsics. Generated from the
/// VP intrinsic registry.
std::optional<unsigned>
VPIntrinsic::getVectorLengthParamPos(Intrinsic::ID IntrinsicID) {
  switch (IntrinsicID) {
  default:
    return std::nullopt;

#define BEGIN_REGISTER_VP_INTRINSIC(VPID, MASKPOS, VLENPOS) \
  case Intrinsic::VPID: \
    return VLENPOS;
#include "llvm/IR/VPIntrinsics.def"
  }
}
425
426/// \return the alignment of the pointer used by this load/store/gather or
427/// scatter.
428MaybeAlign VPIntrinsic::getPointerAlignment() const {
429 std::optional<unsigned> PtrParamOpt =
430 getMemoryPointerParamPos(getIntrinsicID());
431 assert(PtrParamOpt && "no pointer argument!");
432 return getParamAlign(ArgNo: *PtrParamOpt);
433}
434
435/// \return The pointer operand of this load,store, gather or scatter.
436Value *VPIntrinsic::getMemoryPointerParam() const {
437 if (auto PtrParamOpt = getMemoryPointerParamPos(getIntrinsicID()))
438 return getArgOperand(i: *PtrParamOpt);
439 return nullptr;
440}
441
/// Return the operand index of the pointer for the memory VP intrinsic
/// \p VPID: operand 1 for writes (data comes first), operand 0 for reads.
/// std::nullopt for non-memory VP intrinsics.
std::optional<unsigned>
VPIntrinsic::getMemoryPointerParamPos(Intrinsic::ID VPID) {
  switch (VPID) {
  default:
    return std::nullopt;
  case Intrinsic::vp_store:
  case Intrinsic::vp_scatter:
  case Intrinsic::experimental_vp_strided_store:
    return 1;
  case Intrinsic::vp_load:
  case Intrinsic::vp_load_ff:
  case Intrinsic::vp_gather:
  case Intrinsic::experimental_vp_strided_load:
    return 0;
  }
}
458
459/// \return The data (payload) operand of this store or scatter.
460Value *VPIntrinsic::getMemoryDataParam() const {
461 auto DataParamOpt = getMemoryDataParamPos(getIntrinsicID());
462 if (!DataParamOpt)
463 return nullptr;
464 return getArgOperand(i: *DataParamOpt);
465}
466
/// Return the operand index of the stored data for the writing memory VP
/// intrinsic \p VPID (always operand 0), or std::nullopt otherwise.
std::optional<unsigned> VPIntrinsic::getMemoryDataParamPos(Intrinsic::ID VPID) {
  switch (VPID) {
  default:
    return std::nullopt;
  case Intrinsic::vp_store:
  case Intrinsic::vp_scatter:
  case Intrinsic::experimental_vp_strided_store:
    return 0;
  }
}
477
/// File-local constexpr test for VP-ness, shared with the compile-time
/// static_asserts further down in this file. Generated from the VP registry.
constexpr bool isVPIntrinsic(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, MASKPOS, VLENPOS) \
  case Intrinsic::VPID: \
    return true;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}
489
490bool VPIntrinsic::isVPIntrinsic(Intrinsic::ID ID) {
491 return ::isVPIntrinsic(ID);
492}
493
// Equivalent non-predicated opcode
/// Map a VP intrinsic to the plain Instruction opcode it predicates, or
/// std::nullopt when there is none. Generated from the VP registry.
constexpr static std::optional<unsigned>
getFunctionalOpcodeForVP(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_FUNCTIONAL_OPC(OPC) return Instruction::OPC;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return std::nullopt;
}
507
508std::optional<unsigned>
509VPIntrinsic::getFunctionalOpcodeForVP(Intrinsic::ID ID) {
510 return ::getFunctionalOpcodeForVP(ID);
511}
512
// Equivalent non-predicated intrinsic ID
/// Map a VP intrinsic to the plain intrinsic it predicates, or std::nullopt
/// when there is none. Generated from the VP registry.
constexpr static std::optional<Intrinsic::ID>
getFunctionalIntrinsicIDForVP(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_FUNCTIONAL_INTRINSIC(INTRIN) return Intrinsic::INTRIN;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return std::nullopt;
}
526
527std::optional<Intrinsic::ID>
528VPIntrinsic::getFunctionalIntrinsicIDForVP(Intrinsic::ID ID) {
529 return ::getFunctionalIntrinsicIDForVP(ID);
530}
531
/// Return true for VP intrinsics explicitly registered as having no
/// functional (non-predicated) equivalent. Generated from the VP registry.
constexpr static bool doesVPHaveNoFunctionalEquivalent(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_NO_FUNCTIONAL return true;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}
543
// All VP intrinsics should have an equivalent non-VP opcode or intrinsic
// defined, or be marked that they don't have one.
// This expands to one static_assert per registered VP intrinsic, evaluated
// at compile time via the constexpr helpers above.
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) \
  static_assert(doesVPHaveNoFunctionalEquivalent(Intrinsic::VPID) || \
                getFunctionalOpcodeForVP(Intrinsic::VPID) || \
                getFunctionalIntrinsicIDForVP(Intrinsic::VPID));
#include "llvm/IR/VPIntrinsics.def"
551
// Equivalent non-predicated constrained intrinsic
/// Map a VP intrinsic to its constrained-fp counterpart, or std::nullopt
/// when there is none. Generated from the VP registry.
std::optional<Intrinsic::ID>
VPIntrinsic::getConstrainedIntrinsicIDForVP(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_CONSTRAINEDFP(CID) return Intrinsic::CID;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return std::nullopt;
}
565
/// Return the VP intrinsic that predicates the plain IR opcode \p IROPC, or
/// not_intrinsic if no VP form exists. The macro expansion makes each
/// VP_PROPERTY_FUNCTIONAL_OPC case fall through to its intrinsic's
/// END_REGISTER return.
Intrinsic::ID VPIntrinsic::getForOpcode(unsigned IROPC) {
  switch (IROPC) {
  default:
    break;

#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) break;
#define VP_PROPERTY_FUNCTIONAL_OPC(OPC) case Instruction::OPC:
#define END_REGISTER_VP_INTRINSIC(VPID) return Intrinsic::VPID;
#include "llvm/IR/VPIntrinsics.def"
  }
  return Intrinsic::not_intrinsic;
}
578
/// Return the VP form of intrinsic \p Id: VP intrinsics map to themselves,
/// functional intrinsics to their VP counterpart, everything else to
/// not_intrinsic. Generated from the VP registry.
constexpr static Intrinsic::ID getForIntrinsic(Intrinsic::ID Id) {
  if (::isVPIntrinsic(ID: Id))
    return Id;

  switch (Id) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) break;
#define VP_PROPERTY_FUNCTIONAL_INTRINSIC(INTRIN) case Intrinsic::INTRIN:
#define END_REGISTER_VP_INTRINSIC(VPID) return Intrinsic::VPID;
#include "llvm/IR/VPIntrinsics.def"
  }
  return Intrinsic::not_intrinsic;
}
593
594Intrinsic::ID VPIntrinsic::getForIntrinsic(Intrinsic::ID Id) {
595 return ::getForIntrinsic(Id);
596}
597
/// Return true when the explicit vector length operand provably masks off no
/// lanes, i.e. it statically covers the operation's whole vector.
bool VPIntrinsic::canIgnoreVectorLengthParam() const {
  using namespace PatternMatch;

  ElementCount EC = getStaticVectorLength();

  // No vlen param - no lanes masked-off by it.
  auto *VLParam = getVectorLengthParam();
  if (!VLParam)
    return true;

  // Note that the VP intrinsic causes undefined behavior if the Explicit Vector
  // Length parameter is strictly greater-than the number of vector elements of
  // the operation. This function returns true when this is detected statically
  // in the IR.

  // Check whether "W == vscale * EC.getKnownMinValue()"
  if (EC.isScalable()) {
    // Compare vscale patterns
    uint64_t VScaleFactor;
    if (match(V: VLParam, P: m_Mul(L: m_VScale(), R: m_ConstantInt(V&: VScaleFactor))))
      return VScaleFactor >= EC.getKnownMinValue();
    // Bare vscale only covers the vector when the minimum count is 1.
    return (EC.getKnownMinValue() == 1) && match(V: VLParam, P: m_VScale());
  }

  // standard SIMD operation
  const auto *VLConst = dyn_cast<ConstantInt>(Val: VLParam);
  if (!VLConst)
    return false;

  uint64_t VLNum = VLConst->getZExtValue();
  if (VLNum >= EC.getKnownMinValue())
    return true;

  return false;
}
633
/// Build (or fetch) the declaration of the VP intrinsic \p VPID, deducing
/// its overload types from \p ReturnType and the concrete \p Params the
/// caller intends to pass.
Function *VPIntrinsic::getOrInsertDeclarationForParams(
    Module *M, Intrinsic::ID VPID, Type *ReturnType, ArrayRef<Value *> Params) {
  assert(isVPIntrinsic(VPID) && "not a VP intrinsic");
  Function *VPFunc;
  switch (VPID) {
  default: {
    // Generic case: overload on the first operand, except reductions which
    // overload on their vector operand.
    Type *OverloadTy = Params[0]->getType();
    if (VPReductionIntrinsic::isVPReduction(ID: VPID))
      OverloadTy =
          Params[*VPReductionIntrinsic::getVectorParamPos(ID: VPID)]->getType();

    VPFunc = Intrinsic::getOrInsertDeclaration(M, id: VPID, OverloadTys: OverloadTy);
    break;
  }
  // Conversions overload on both the destination and source types.
  case Intrinsic::vp_trunc:
  case Intrinsic::vp_sext:
  case Intrinsic::vp_zext:
  case Intrinsic::vp_fptoui:
  case Intrinsic::vp_fptosi:
  case Intrinsic::vp_uitofp:
  case Intrinsic::vp_sitofp:
  case Intrinsic::vp_fptrunc:
  case Intrinsic::vp_fpext:
  case Intrinsic::vp_ptrtoint:
  case Intrinsic::vp_inttoptr:
  case Intrinsic::vp_lrint:
  case Intrinsic::vp_llrint:
  case Intrinsic::vp_cttz_elts:
    VPFunc = Intrinsic::getOrInsertDeclaration(
        M, id: VPID, OverloadTys: {ReturnType, Params[0]->getType()});
    break;
  case Intrinsic::vp_is_fpclass:
    VPFunc = Intrinsic::getOrInsertDeclaration(M, id: VPID, OverloadTys: {Params[0]->getType()});
    break;
  case Intrinsic::vp_merge:
  case Intrinsic::vp_select:
    // Overload on the value operand (operand 1); operand 0 is the condition.
    VPFunc = Intrinsic::getOrInsertDeclaration(M, id: VPID, OverloadTys: {Params[1]->getType()});
    break;
  case Intrinsic::vp_load:
    VPFunc = Intrinsic::getOrInsertDeclaration(
        M, id: VPID, OverloadTys: {ReturnType, Params[0]->getType()});
    break;
  case Intrinsic::vp_load_ff:
    // The fault-first load returns a struct; overload on its first element.
    VPFunc = Intrinsic::getOrInsertDeclaration(
        M, id: VPID, OverloadTys: {ReturnType->getStructElementType(N: 0), Params[0]->getType()});
    break;
  case Intrinsic::experimental_vp_strided_load:
    VPFunc = Intrinsic::getOrInsertDeclaration(
        M, id: VPID, OverloadTys: {ReturnType, Params[0]->getType(), Params[1]->getType()});
    break;
  case Intrinsic::vp_gather:
    VPFunc = Intrinsic::getOrInsertDeclaration(
        M, id: VPID, OverloadTys: {ReturnType, Params[0]->getType()});
    break;
  case Intrinsic::vp_store:
    VPFunc = Intrinsic::getOrInsertDeclaration(
        M, id: VPID, OverloadTys: {Params[0]->getType(), Params[1]->getType()});
    break;
  case Intrinsic::experimental_vp_strided_store:
    VPFunc = Intrinsic::getOrInsertDeclaration(
        M, id: VPID,
        OverloadTys: {Params[0]->getType(), Params[1]->getType(), Params[2]->getType()});
    break;
  case Intrinsic::vp_scatter:
    VPFunc = Intrinsic::getOrInsertDeclaration(
        M, id: VPID, OverloadTys: {Params[0]->getType(), Params[1]->getType()});
    break;
  }
  assert(VPFunc && "Could not declare VP intrinsic");
  return VPFunc;
}
705
/// Return true if \p ID is one of the vp.reduce.* intrinsics.
bool VPReductionIntrinsic::isVPReduction(Intrinsic::ID ID) {
  switch (ID) {
  case Intrinsic::vp_reduce_add:
  case Intrinsic::vp_reduce_mul:
  case Intrinsic::vp_reduce_and:
  case Intrinsic::vp_reduce_or:
  case Intrinsic::vp_reduce_xor:
  case Intrinsic::vp_reduce_smax:
  case Intrinsic::vp_reduce_smin:
  case Intrinsic::vp_reduce_umax:
  case Intrinsic::vp_reduce_umin:
  case Intrinsic::vp_reduce_fmax:
  case Intrinsic::vp_reduce_fmin:
  case Intrinsic::vp_reduce_fmaximum:
  case Intrinsic::vp_reduce_fminimum:
  case Intrinsic::vp_reduce_fadd:
  case Intrinsic::vp_reduce_fmul:
    return true;
  default:
    return false;
  }
}
728
729bool VPCastIntrinsic::isVPCast(Intrinsic::ID ID) {
730 // All of the vp.casts correspond to instructions
731 if (std::optional<unsigned> Opc = getFunctionalOpcodeForVP(ID))
732 return Instruction::isCast(Opcode: *Opc);
733 return false;
734}
735
736bool VPCmpIntrinsic::isVPCmp(Intrinsic::ID ID) {
737 switch (ID) {
738 default:
739 return false;
740 case Intrinsic::vp_fcmp:
741 case Intrinsic::vp_icmp:
742 return true;
743 }
744}
745
/// Return true if \p ID is a VP intrinsic registered as a binary operation.
/// Generated from the VP registry.
bool VPBinOpIntrinsic::isVPBinOp(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_BINARYOP return true;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}
757
/// Decode an icmp predicate stored as an MDString operand (e.g. "slt").
/// Returns BAD_ICMP_PREDICATE for missing or unrecognised strings.
static ICmpInst::Predicate getIntPredicateFromMD(const Value *Op) {
  Metadata *MD = cast<MetadataAsValue>(Val: Op)->getMetadata();
  if (!MD || !isa<MDString>(Val: MD))
    return ICmpInst::BAD_ICMP_PREDICATE;
  return StringSwitch<ICmpInst::Predicate>(cast<MDString>(Val: MD)->getString())
      .Case(S: "eq", Value: ICmpInst::ICMP_EQ)
      .Case(S: "ne", Value: ICmpInst::ICMP_NE)
      .Case(S: "ugt", Value: ICmpInst::ICMP_UGT)
      .Case(S: "uge", Value: ICmpInst::ICMP_UGE)
      .Case(S: "ult", Value: ICmpInst::ICMP_ULT)
      .Case(S: "ule", Value: ICmpInst::ICMP_ULE)
      .Case(S: "sgt", Value: ICmpInst::ICMP_SGT)
      .Case(S: "sge", Value: ICmpInst::ICMP_SGE)
      .Case(S: "slt", Value: ICmpInst::ICMP_SLT)
      .Case(S: "sle", Value: ICmpInst::ICMP_SLE)
      .Default(Value: ICmpInst::BAD_ICMP_PREDICATE);
}
775
776CmpInst::Predicate VPCmpIntrinsic::getPredicate() const {
777 assert(isVPCmp(getIntrinsicID()));
778 return getIntrinsicID() == Intrinsic::vp_fcmp
779 ? getFPPredicateFromMD(Op: getArgOperand(i: 2))
780 : getIntPredicateFromMD(Op: getArgOperand(i: 2));
781}
782
783unsigned VPReductionIntrinsic::getVectorParamPos() const {
784 return *VPReductionIntrinsic::getVectorParamPos(ID: getIntrinsicID());
785}
786
787unsigned VPReductionIntrinsic::getStartParamPos() const {
788 return *VPReductionIntrinsic::getStartParamPos(ID: getIntrinsicID());
789}
790
791std::optional<unsigned>
792VPReductionIntrinsic::getVectorParamPos(Intrinsic::ID ID) {
793 if (isVPReduction(ID))
794 return 1;
795 return std::nullopt;
796}
797
798std::optional<unsigned>
799VPReductionIntrinsic::getStartParamPos(Intrinsic::ID ID) {
800 if (isVPReduction(ID))
801 return 0;
802 return std::nullopt;
803}
804
/// Map this overflow/saturating intrinsic to the underlying binary opcode.
Instruction::BinaryOps BinaryOpIntrinsic::getBinaryOp() const {
  switch (getIntrinsicID()) {
  case Intrinsic::uadd_with_overflow:
  case Intrinsic::sadd_with_overflow:
  case Intrinsic::uadd_sat:
  case Intrinsic::sadd_sat:
    return Instruction::Add;
  case Intrinsic::usub_with_overflow:
  case Intrinsic::ssub_with_overflow:
  case Intrinsic::usub_sat:
  case Intrinsic::ssub_sat:
    return Instruction::Sub;
  case Intrinsic::umul_with_overflow:
  case Intrinsic::smul_with_overflow:
    return Instruction::Mul;
  default:
    llvm_unreachable("Invalid intrinsic");
  }
}
824
/// Return true for the signed variants (s*) of the overflow/saturating
/// intrinsics, false for the unsigned ones.
bool BinaryOpIntrinsic::isSigned() const {
  switch (getIntrinsicID()) {
  case Intrinsic::sadd_with_overflow:
  case Intrinsic::ssub_with_overflow:
  case Intrinsic::smul_with_overflow:
  case Intrinsic::sadd_sat:
  case Intrinsic::ssub_sat:
    return true;
  default:
    return false;
  }
}
837
838unsigned BinaryOpIntrinsic::getNoWrapKind() const {
839 if (isSigned())
840 return OverflowingBinaryOperator::NoSignedWrap;
841 else
842 return OverflowingBinaryOperator::NoUnsignedWrap;
843}
844
/// Return the statepoint this projection (gc.relocate/gc.result) refers to,
/// looking through the landingpad on the exceptional path of invoke
/// statepoints. May return undef when the token was dropped.
const Value *GCProjectionInst::getStatepoint() const {
  const Value *Token = getArgOperand(i: 0);
  if (isa<UndefValue>(Val: Token))
    return Token;

  // Treat none token as if it was undef here
  if (isa<ConstantTokenNone>(Val: Token))
    return UndefValue::get(T: Token->getType());

  // This takes care both of relocates for call statepoints and relocates
  // on normal path of invoke statepoint.
  if (!isa<LandingPadInst>(Val: Token))
    return cast<GCStatepointInst>(Val: Token);

  // This relocate is on exceptional path of an invoke statepoint
  const BasicBlock *InvokeBB =
      cast<Instruction>(Val: Token)->getParent()->getUniquePredecessor();

  assert(InvokeBB && "safepoints should have unique landingpads");
  assert(InvokeBB->getTerminator() &&
         "safepoint block should be well formed");

  // The invoke terminating the unique predecessor is the statepoint.
  return cast<GCStatepointInst>(Val: InvokeBB->getTerminator());
}
869
870Value *GCRelocateInst::getBasePtr() const {
871 auto Statepoint = getStatepoint();
872 if (isa<UndefValue>(Val: Statepoint))
873 return UndefValue::get(T: Statepoint->getType());
874
875 auto *GCInst = cast<GCStatepointInst>(Val: Statepoint);
876 if (auto Opt = GCInst->getOperandBundle(ID: LLVMContext::OB_gc_live))
877 return *(Opt->Inputs.begin() + getBasePtrIndex());
878 return *(GCInst->arg_begin() + getBasePtrIndex());
879}
880
881Value *GCRelocateInst::getDerivedPtr() const {
882 auto *Statepoint = getStatepoint();
883 if (isa<UndefValue>(Val: Statepoint))
884 return UndefValue::get(T: Statepoint->getType());
885
886 auto *GCInst = cast<GCStatepointInst>(Val: Statepoint);
887 if (auto Opt = GCInst->getOperandBundle(ID: LLVMContext::OB_gc_live))
888 return *(Opt->Inputs.begin() + getDerivedPtrIndex());
889 return *(GCInst->arg_begin() + getDerivedPtrIndex());
890}
891
892ConvergenceControlInst *ConvergenceControlInst::CreateAnchor(BasicBlock &BB) {
893 Module *M = BB.getModule();
894 Function *Fn = Intrinsic::getOrInsertDeclaration(
895 M, id: llvm::Intrinsic::experimental_convergence_anchor);
896 auto *Call = CallInst::Create(Func: Fn, NameStr: "", InsertBefore: BB.getFirstInsertionPt());
897 return cast<ConvergenceControlInst>(Val: Call);
898}
899
900ConvergenceControlInst *ConvergenceControlInst::CreateEntry(BasicBlock &BB) {
901 Module *M = BB.getModule();
902 Function *Fn = Intrinsic::getOrInsertDeclaration(
903 M, id: llvm::Intrinsic::experimental_convergence_entry);
904 auto *Call = CallInst::Create(Func: Fn, NameStr: "", InsertBefore: BB.getFirstInsertionPt());
905 return cast<ConvergenceControlInst>(Val: Call);
906}
907
908ConvergenceControlInst *
909ConvergenceControlInst::CreateLoop(BasicBlock &BB,
910 ConvergenceControlInst *ParentToken) {
911 Module *M = BB.getModule();
912 Function *Fn = Intrinsic::getOrInsertDeclaration(
913 M, id: llvm::Intrinsic::experimental_convergence_loop);
914 llvm::Value *BundleArgs[] = {ParentToken};
915 llvm::OperandBundleDef OB("convergencectrl", BundleArgs);
916 auto *Call = CallInst::Create(Func: Fn, Args: {}, Bundles: {OB}, NameStr: "", InsertBefore: BB.getFirstInsertionPt());
917 return cast<ConvergenceControlInst>(Val: Call);
918}
919