Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
freebsd
GitHub Repository: freebsd/freebsd-src
Path: blob/main/contrib/llvm-project/llvm/lib/IR/IntrinsicInst.cpp
35233 views
1
//===-- IntrinsicInst.cpp - Intrinsic Instruction Wrappers ---------------===//
2
//
3
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4
// See https://llvm.org/LICENSE.txt for license information.
5
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6
//
7
//===----------------------------------------------------------------------===//
8
//
9
// This file implements methods that make it really easy to deal with intrinsic
10
// functions.
11
//
12
// All intrinsic function calls are instances of the call instruction, so these
13
// are all subclasses of the CallInst class. Note that none of these classes
14
// has state or virtual methods, which is an important part of this gross/neat
15
// hack working.
16
//
17
// In some cases, arguments to intrinsics need to be generic and are defined as
18
// type pointer to empty struct { }*. To access the real item of interest the
19
// cast instruction needs to be stripped away.
20
//
21
//===----------------------------------------------------------------------===//
22
23
#include "llvm/IR/IntrinsicInst.h"
24
#include "llvm/ADT/StringSwitch.h"
25
#include "llvm/IR/Constants.h"
26
#include "llvm/IR/DebugInfoMetadata.h"
27
#include "llvm/IR/Metadata.h"
28
#include "llvm/IR/Module.h"
29
#include "llvm/IR/Operator.h"
30
#include "llvm/IR/PatternMatch.h"
31
#include "llvm/IR/Statepoint.h"
32
#include <optional>
33
34
using namespace llvm;
35
36
// Returns true if the given intrinsic may be lowered to an actual function
// call by the backend. Currently this is exactly the set of Objective-C ARC
// runtime intrinsics, which become calls into the ObjC runtime; passes must
// therefore treat them like real calls (ABI effects, not freely movable).
bool IntrinsicInst::mayLowerToFunctionCall(Intrinsic::ID IID) {
  switch (IID) {
  case Intrinsic::objc_autorelease:
  case Intrinsic::objc_autoreleasePoolPop:
  case Intrinsic::objc_autoreleasePoolPush:
  case Intrinsic::objc_autoreleaseReturnValue:
  case Intrinsic::objc_copyWeak:
  case Intrinsic::objc_destroyWeak:
  case Intrinsic::objc_initWeak:
  case Intrinsic::objc_loadWeak:
  case Intrinsic::objc_loadWeakRetained:
  case Intrinsic::objc_moveWeak:
  case Intrinsic::objc_release:
  case Intrinsic::objc_retain:
  case Intrinsic::objc_retainAutorelease:
  case Intrinsic::objc_retainAutoreleaseReturnValue:
  case Intrinsic::objc_retainAutoreleasedReturnValue:
  case Intrinsic::objc_retainBlock:
  case Intrinsic::objc_storeStrong:
  case Intrinsic::objc_storeWeak:
  case Intrinsic::objc_unsafeClaimAutoreleasedReturnValue:
  case Intrinsic::objc_retainedObject:
  case Intrinsic::objc_unretainedObject:
  case Intrinsic::objc_unretainedPointer:
  case Intrinsic::objc_retain_autorelease:
  case Intrinsic::objc_sync_enter:
  case Intrinsic::objc_sync_exit:
    return true;
  default:
    return false;
  }
}
68
69
//===----------------------------------------------------------------------===//
70
/// DbgVariableIntrinsic - This is the common base class for debug info
71
/// intrinsics for variables.
72
///
73
74
// Returns an iterator range over the location operands encoded in the raw
// location metadata: either a single ValueAsMetadata, the arguments of a
// DIArgList, or an empty range for an empty metadata tuple.
iterator_range<location_op_iterator> RawLocationWrapper::location_ops() const {
  Metadata *MD = getRawLocation();
  assert(MD && "First operand of DbgVariableIntrinsic should be non-null.");
  // If operand is ValueAsMetadata, return a range over just that operand.
  if (auto *VAM = dyn_cast<ValueAsMetadata>(MD)) {
    // Single-element range built from the object and one-past-it pointers.
    return {location_op_iterator(VAM), location_op_iterator(VAM + 1)};
  }
  // If operand is DIArgList, return a range over its args.
  if (auto *AL = dyn_cast<DIArgList>(MD))
    return {location_op_iterator(AL->args_begin()),
            location_op_iterator(AL->args_end())};
  // Operand must be an empty metadata tuple, so return empty iterator.
  return {location_op_iterator(static_cast<ValueAsMetadata *>(nullptr)),
          location_op_iterator(static_cast<ValueAsMetadata *>(nullptr))};
}
89
90
// Forwards to RawLocationWrapper::location_ops over this intrinsic's raw
// location operand.
iterator_range<location_op_iterator>
DbgVariableIntrinsic::location_ops() const {
  return getWrappedLocation().location_ops();
}
94
95
// Returns the OpIdx'th location operand; see the RawLocationWrapper overload
// for the lookup rules.
Value *DbgVariableIntrinsic::getVariableLocationOp(unsigned OpIdx) const {
  return getWrappedLocation().getVariableLocationOp(OpIdx);
}
98
99
// Returns the OpIdx'th location operand's Value. For a DIArgList the index
// selects one of its args; for any other MDNode (the "empty" location) there
// is no value, so nullptr is returned; otherwise the raw location must be a
// single ValueAsMetadata and OpIdx must be 0.
Value *RawLocationWrapper::getVariableLocationOp(unsigned OpIdx) const {
  Metadata *MD = getRawLocation();
  assert(MD && "First operand of DbgVariableIntrinsic should be non-null.");
  if (auto *AL = dyn_cast<DIArgList>(MD))
    return AL->getArgs()[OpIdx]->getValue();
  if (isa<MDNode>(MD))
    return nullptr;
  assert(
      isa<ValueAsMetadata>(MD) &&
      "Attempted to get location operand from DbgVariableIntrinsic with none.");
  auto *V = cast<ValueAsMetadata>(MD);
  assert(OpIdx == 0 && "Operand Index must be 0 for a debug intrinsic with a "
                       "single location operand.");
  return V->getValue();
}
114
115
/// Convert \p V into ValueAsMetadata: a MetadataAsValue wrapper is unwrapped
/// (yielding nullptr if the wrapped metadata is not ValueAsMetadata), and any
/// other value is wrapped directly.
static ValueAsMetadata *getAsMetadata(Value *V) {
  if (auto *MAV = dyn_cast<MetadataAsValue>(V))
    return dyn_cast<ValueAsMetadata>(MAV->getMetadata());
  return ValueAsMetadata::get(V);
}
120
121
// Replaces every use of OldValue among this intrinsic's location operands
// with NewValue. For dbg.assign, the address operand is also checked and
// replaced. If OldValue is not a current location, AllowEmpty permits a
// silent no-op; otherwise (asserts enabled) it must at least have been the
// dbg.assign address.
void DbgVariableIntrinsic::replaceVariableLocationOp(Value *OldValue,
                                                     Value *NewValue,
                                                     bool AllowEmpty) {
  // If OldValue is used as the address part of a dbg.assign intrinsic replace
  // it with NewValue and return true.
  auto ReplaceDbgAssignAddress = [this, OldValue, NewValue]() -> bool {
    auto *DAI = dyn_cast<DbgAssignIntrinsic>(this);
    if (!DAI || OldValue != DAI->getAddress())
      return false;
    DAI->setAddress(NewValue);
    return true;
  };
  bool DbgAssignAddrReplaced = ReplaceDbgAssignAddress();
  (void)DbgAssignAddrReplaced; // Only read by the assert below in +Asserts.

  assert(NewValue && "Values must be non-null");
  auto Locations = location_ops();
  auto OldIt = find(Locations, OldValue);
  if (OldIt == Locations.end()) {
    if (AllowEmpty || DbgAssignAddrReplaced)
      return;
    // Reaching here means neither condition held, so this always fires.
    assert(DbgAssignAddrReplaced &&
           "OldValue must be dbg.assign addr if unused in DIArgList");
    return;
  }

  assert(OldIt != Locations.end() && "OldValue must be a current location");
  if (!hasArgList()) {
    // Single-location form: rewrap NewValue as metadata and store it as the
    // first argument directly.
    Value *NewOperand = isa<MetadataAsValue>(NewValue)
                            ? NewValue
                            : MetadataAsValue::get(
                                  getContext(), ValueAsMetadata::get(NewValue));
    return setArgOperand(0, NewOperand);
  }
  // DIArgList form: rebuild the whole list, substituting NewValue for every
  // operand equal to OldValue.
  SmallVector<ValueAsMetadata *, 4> MDs;
  ValueAsMetadata *NewOperand = getAsMetadata(NewValue);
  for (auto *VMD : Locations)
    MDs.push_back(VMD == *OldIt ? NewOperand : getAsMetadata(VMD));
  setArgOperand(
      0, MetadataAsValue::get(getContext(), DIArgList::get(getContext(), MDs)));
}
162
// Replaces the location operand at position OpIdx with NewValue, rebuilding
// the DIArgList when this intrinsic has a multi-operand location.
void DbgVariableIntrinsic::replaceVariableLocationOp(unsigned OpIdx,
                                                     Value *NewValue) {
  assert(OpIdx < getNumVariableLocationOps() && "Invalid Operand Index");
  if (!hasArgList()) {
    // Single-location form: OpIdx can only be 0; store NewValue directly.
    Value *NewOperand = isa<MetadataAsValue>(NewValue)
                            ? NewValue
                            : MetadataAsValue::get(
                                  getContext(), ValueAsMetadata::get(NewValue));
    return setArgOperand(0, NewOperand);
  }
  // DIArgList form: copy all operands, substituting at OpIdx.
  SmallVector<ValueAsMetadata *, 4> MDs;
  ValueAsMetadata *NewOperand = getAsMetadata(NewValue);
  for (unsigned Idx = 0; Idx < getNumVariableLocationOps(); ++Idx)
    MDs.push_back(Idx == OpIdx ? NewOperand
                               : getAsMetadata(getVariableLocationOp(Idx)));
  setArgOperand(
      0, MetadataAsValue::get(getContext(), DIArgList::get(getContext(), MDs)));
}
180
181
// Appends NewValues to this intrinsic's location operands and installs
// NewExpr as the expression. NewExpr must reference every resulting location
// operand (checked by the first assert).
void DbgVariableIntrinsic::addVariableLocationOps(ArrayRef<Value *> NewValues,
                                                  DIExpression *NewExpr) {
  assert(NewExpr->hasAllLocationOps(getNumVariableLocationOps() +
                                    NewValues.size()) &&
         "NewExpr for debug variable intrinsic does not reference every "
         "location operand.");
  assert(!is_contained(NewValues, nullptr) && "New values must be non-null");
  // Operand 2 is the DIExpression; operand 0 is the (DIArgList) location.
  setArgOperand(2, MetadataAsValue::get(getContext(), NewExpr));
  SmallVector<ValueAsMetadata *, 4> MDs;
  for (auto *VMD : location_ops())
    MDs.push_back(getAsMetadata(VMD));
  for (auto *VMD : NewValues)
    MDs.push_back(getAsMetadata(VMD));
  setArgOperand(
      0, MetadataAsValue::get(getContext(), DIArgList::get(getContext(), MDs)));
}
197
198
// Returns the size in bits this intrinsic describes: the fragment size when
// the expression has fragment info, otherwise the whole variable's size
// (which may itself be unknown, hence the optional).
std::optional<uint64_t> DbgVariableIntrinsic::getFragmentSizeInBits() const {
  if (auto Fragment = getExpression()->getFragmentInfo())
    return Fragment->SizeInBits;
  return getVariable()->getSizeInBits();
}
203
204
// Returns the address operand's Value, or nullptr when the address has been
// dropped (represented by an empty MDNode rather than ValueAsMetadata).
Value *DbgAssignIntrinsic::getAddress() const {
  auto *MD = getRawAddress();
  if (auto *V = dyn_cast<ValueAsMetadata>(MD))
    return V->getValue();

  // When the value goes to null, it gets replaced by an empty MDNode.
  assert(!cast<MDNode>(MD)->getNumOperands() && "Expected an empty MDNode");
  return nullptr;
}
213
214
// Replaces the DIAssignID linking this dbg.assign to its store.
void DbgAssignIntrinsic::setAssignId(DIAssignID *New) {
  setOperand(OpAssignID, MetadataAsValue::get(getContext(), New));
}
217
218
// Sets the address operand, wrapping V in the metadata-as-value form the
// operand list requires.
void DbgAssignIntrinsic::setAddress(Value *V) {
  setOperand(OpAddress,
             MetadataAsValue::get(getContext(), ValueAsMetadata::get(V)));
}
222
223
// Marks the address component as killed by replacing it with undef of the
// same type. No-op if it is already killed/absent.
void DbgAssignIntrinsic::setKillAddress() {
  if (isKillAddress())
    return;
  setAddress(UndefValue::get(getAddress()->getType()));
}
228
229
bool DbgAssignIntrinsic::isKillAddress() const {
230
Value *Addr = getAddress();
231
return !Addr || isa<UndefValue>(Addr);
232
}
233
234
// Sets the value operand, wrapping V in metadata-as-value form.
void DbgAssignIntrinsic::setValue(Value *V) {
  setOperand(OpValue,
             MetadataAsValue::get(getContext(), ValueAsMetadata::get(V)));
}
238
239
// Finds the index in NameTable of the intrinsic whose name is Name or whose
// name is the longest dotted prefix of Name (for suffix-overloaded
// intrinsics like "llvm.foo.p0i8"). Returns -1 if no entry matches.
int llvm::Intrinsic::lookupLLVMIntrinsicByName(ArrayRef<const char *> NameTable,
                                               StringRef Name) {
  assert(Name.starts_with("llvm.") && "Unexpected intrinsic prefix");

  // Do successive binary searches of the dotted name components. For
  // "llvm.gc.experimental.statepoint.p1i8.p1i32", we will find the range of
  // intrinsics starting with "llvm.gc", then "llvm.gc.experimental", then
  // "llvm.gc.experimental.statepoint", and then we will stop as the range is
  // size 1. During the search, we can skip the prefix that we already know is
  // identical. By using strncmp we consider names with differing suffixes to
  // be part of the equal range.
  size_t CmpEnd = 4; // Skip the "llvm" component.
  const char *const *Low = NameTable.begin();
  const char *const *High = NameTable.end();
  const char *const *LastLow = Low;
  while (CmpEnd < Name.size() && High - Low > 0) {
    size_t CmpStart = CmpEnd;
    // Advance to the next '.'-delimited component (or end of Name).
    CmpEnd = Name.find('.', CmpStart + 1);
    CmpEnd = CmpEnd == StringRef::npos ? Name.size() : CmpEnd;
    auto Cmp = [CmpStart, CmpEnd](const char *LHS, const char *RHS) {
      return strncmp(LHS + CmpStart, RHS + CmpStart, CmpEnd - CmpStart) < 0;
    };
    // Remember the last non-empty range start in case the range collapses.
    LastLow = Low;
    std::tie(Low, High) = std::equal_range(Low, High, Name.data(), Cmp);
  }
  if (High - Low > 0)
    LastLow = Low;

  if (LastLow == NameTable.end())
    return -1;
  StringRef NameFound = *LastLow;
  // Accept an exact match, or a table entry that is a full dotted-component
  // prefix of Name (i.e. Name continues with '.' right after it).
  if (Name == NameFound ||
      (Name.starts_with(NameFound) && Name[NameFound.size()] == '.'))
    return LastLow - NameTable.begin();
  return -1;
}
275
276
// Returns the number-of-counters operand (arg 2). Value-profiling
// instructions have no counters, so this is invalid for them.
ConstantInt *InstrProfCntrInstBase::getNumCounters() const {
  if (InstrProfValueProfileInst::classof(this))
    llvm_unreachable("InstrProfValueProfileInst does not have counters!");
  return cast<ConstantInt>(const_cast<Value *>(getArgOperand(2)));
}
281
282
// Returns the counter-index operand (arg 3). Value-profiling instructions
// must use their own getIndex() accessor instead.
ConstantInt *InstrProfCntrInstBase::getIndex() const {
  if (InstrProfValueProfileInst::classof(this))
    llvm_unreachable("Please use InstrProfValueProfileInst::getIndex()");
  return cast<ConstantInt>(const_cast<Value *>(getArgOperand(3)));
}
287
288
// Returns the increment step: the explicit step operand (arg 4) for the
// "instrprof.increment.step" form, otherwise a constant 1 (i64).
Value *InstrProfIncrementInst::getStep() const {
  if (InstrProfIncrementInstStep::classof(this)) {
    return const_cast<Value *>(getArgOperand(4));
  }
  const Module *M = getModule();
  LLVMContext &Context = M->getContext();
  return ConstantInt::get(Type::getInt64Ty(Context), 1);
}
296
297
// Returns the callee operand (arg 4) of an instrprof.callsite intrinsic.
// NOTE(review): the isa<> guard looks tautological inside a member of
// InstrProfCallsite unless callers invoke this through a mis-cast pointer;
// presumably defensive — confirm before simplifying.
Value *InstrProfCallsite::getCallee() const {
  if (isa<InstrProfCallsite>(this))
    return getArgOperand(4);
  return nullptr;
}
302
303
// Decodes the rounding-mode metadata operand (second-to-last argument) of a
// constrained FP intrinsic; nullopt if it is missing or not an MDString.
std::optional<RoundingMode> ConstrainedFPIntrinsic::getRoundingMode() const {
  unsigned NumOperands = arg_size();
  Metadata *MD = nullptr;
  auto *MAV = dyn_cast<MetadataAsValue>(getArgOperand(NumOperands - 2));
  if (MAV)
    MD = MAV->getMetadata();
  if (!MD || !isa<MDString>(MD))
    return std::nullopt;
  return convertStrToRoundingMode(cast<MDString>(MD)->getString());
}
313
314
// Decodes the exception-behavior metadata operand (last argument) of a
// constrained FP intrinsic; nullopt if it is missing or not an MDString.
std::optional<fp::ExceptionBehavior>
ConstrainedFPIntrinsic::getExceptionBehavior() const {
  unsigned NumOperands = arg_size();
  Metadata *MD = nullptr;
  auto *MAV = dyn_cast<MetadataAsValue>(getArgOperand(NumOperands - 1));
  if (MAV)
    MD = MAV->getMetadata();
  if (!MD || !isa<MDString>(MD))
    return std::nullopt;
  return convertStrToExceptionBehavior(cast<MDString>(MD)->getString());
}
325
326
bool ConstrainedFPIntrinsic::isDefaultFPEnvironment() const {
327
std::optional<fp::ExceptionBehavior> Except = getExceptionBehavior();
328
if (Except) {
329
if (*Except != fp::ebIgnore)
330
return false;
331
}
332
333
std::optional<RoundingMode> Rounding = getRoundingMode();
334
if (Rounding) {
335
if (*Rounding != RoundingMode::NearestTiesToEven)
336
return false;
337
}
338
339
return true;
340
}
341
342
// Maps a predicate-name MDString operand ("oeq", "ult", ...) to the
// corresponding FCmp predicate; BAD_FCMP_PREDICATE on anything unrecognized.
static FCmpInst::Predicate getFPPredicateFromMD(const Value *Op) {
  Metadata *MD = cast<MetadataAsValue>(Op)->getMetadata();
  if (!MD || !isa<MDString>(MD))
    return FCmpInst::BAD_FCMP_PREDICATE;
  return StringSwitch<FCmpInst::Predicate>(cast<MDString>(MD)->getString())
      .Case("oeq", FCmpInst::FCMP_OEQ)
      .Case("ogt", FCmpInst::FCMP_OGT)
      .Case("oge", FCmpInst::FCMP_OGE)
      .Case("olt", FCmpInst::FCMP_OLT)
      .Case("ole", FCmpInst::FCMP_OLE)
      .Case("one", FCmpInst::FCMP_ONE)
      .Case("ord", FCmpInst::FCMP_ORD)
      .Case("uno", FCmpInst::FCMP_UNO)
      .Case("ueq", FCmpInst::FCMP_UEQ)
      .Case("ugt", FCmpInst::FCMP_UGT)
      .Case("uge", FCmpInst::FCMP_UGE)
      .Case("ult", FCmpInst::FCMP_ULT)
      .Case("ule", FCmpInst::FCMP_ULE)
      .Case("une", FCmpInst::FCMP_UNE)
      .Default(FCmpInst::BAD_FCMP_PREDICATE);
}
363
364
// The comparison predicate is carried as metadata in operand 2.
FCmpInst::Predicate ConstrainedFPCmpIntrinsic::getPredicate() const {
  return getFPPredicateFromMD(getArgOperand(2));
}
367
368
unsigned ConstrainedFPIntrinsic::getNonMetadataArgCount() const {
369
// All constrained fp intrinsics have "fpexcept" metadata.
370
unsigned NumArgs = arg_size() - 1;
371
372
// Some intrinsics have "round" metadata.
373
if (Intrinsic::hasConstrainedFPRoundingModeOperand(getIntrinsicID()))
374
NumArgs -= 1;
375
376
// Compare intrinsics take their predicate as metadata.
377
if (isa<ConstrainedFPCmpIntrinsic>(this))
378
NumArgs -= 1;
379
380
return NumArgs;
381
}
382
383
// RTTI support: membership is decided purely by the intrinsic ID.
bool ConstrainedFPIntrinsic::classof(const IntrinsicInst *I) {
  return Intrinsic::isConstrainedFPIntrinsic(I->getIntrinsicID());
}
386
387
// Returns the static element count of this VP operation: taken from the mask
// operand's vector type when a mask exists, otherwise (only vp.merge /
// vp.select are mask-less) from the result type.
ElementCount VPIntrinsic::getStaticVectorLength() const {
  Value *VPMask = getMaskParam();
  if (VPMask)
    return cast<VectorType>(VPMask->getType())->getElementCount();

  assert((getIntrinsicID() == Intrinsic::vp_merge ||
          getIntrinsicID() == Intrinsic::vp_select) &&
         "Unexpected VP intrinsic without mask operand");
  return cast<VectorType>(getType())->getElementCount();
}
403
404
// Returns the mask operand, or nullptr for the mask-less VP intrinsics.
Value *VPIntrinsic::getMaskParam() const {
  auto MaskPos = getMaskParamPos(getIntrinsicID());
  return MaskPos ? getArgOperand(*MaskPos) : nullptr;
}
409
410
// Replaces the mask operand with NewMask. The intrinsic must actually have
// a mask operand; previously an intrinsic without one would dereference an
// empty std::optional (undefined behavior) — now diagnosed by an assert.
void VPIntrinsic::setMaskParam(Value *NewMask) {
  auto MaskPos = getMaskParamPos(getIntrinsicID());
  assert(MaskPos && "Intrinsic has no mask operand to set");
  setArgOperand(*MaskPos, NewMask);
}
414
415
// Returns the explicit-vector-length (EVL) operand, or nullptr if this
// intrinsic has none.
Value *VPIntrinsic::getVectorLengthParam() const {
  auto EVLPos = getVectorLengthParamPos(getIntrinsicID());
  return EVLPos ? getArgOperand(*EVLPos) : nullptr;
}
420
421
// Replaces the explicit-vector-length operand with NewEVL. The intrinsic
// must have an EVL operand; previously an empty std::optional would have
// been dereferenced (undefined behavior) — now diagnosed by an assert.
void VPIntrinsic::setVectorLengthParam(Value *NewEVL) {
  auto EVLPos = getVectorLengthParamPos(getIntrinsicID());
  assert(EVLPos && "Intrinsic has no vector-length operand to set");
  setArgOperand(*EVLPos, NewEVL);
}
425
426
// Returns the argument index of the mask operand for a VP intrinsic, or
// nullopt for non-VP IDs. The per-intrinsic positions come from the
// VPIntrinsics.def table expansion below.
std::optional<unsigned>
VPIntrinsic::getMaskParamPos(Intrinsic::ID IntrinsicID) {
  switch (IntrinsicID) {
  default:
    return std::nullopt;

#define BEGIN_REGISTER_VP_INTRINSIC(VPID, MASKPOS, VLENPOS)                    \
  case Intrinsic::VPID:                                                        \
    return MASKPOS;
#include "llvm/IR/VPIntrinsics.def"
  }
}
438
439
// Returns the argument index of the explicit-vector-length operand for a VP
// intrinsic, or nullopt for non-VP IDs (table-driven via VPIntrinsics.def).
std::optional<unsigned>
VPIntrinsic::getVectorLengthParamPos(Intrinsic::ID IntrinsicID) {
  switch (IntrinsicID) {
  default:
    return std::nullopt;

#define BEGIN_REGISTER_VP_INTRINSIC(VPID, MASKPOS, VLENPOS)                    \
  case Intrinsic::VPID:                                                        \
    return VLENPOS;
#include "llvm/IR/VPIntrinsics.def"
  }
}
451
452
/// \return the alignment of the pointer used by this load/store/gather or
/// scatter. Only valid for memory VP intrinsics (asserted); the alignment is
/// read from the pointer argument's parameter attributes.
MaybeAlign VPIntrinsic::getPointerAlignment() const {
  std::optional<unsigned> PtrParamOpt =
      getMemoryPointerParamPos(getIntrinsicID());
  assert(PtrParamOpt && "no pointer argument!");
  return getParamAlign(*PtrParamOpt);
}
460
461
/// \return The pointer operand of this load,store, gather or scatter.
462
Value *VPIntrinsic::getMemoryPointerParam() const {
463
if (auto PtrParamOpt = getMemoryPointerParamPos(getIntrinsicID()))
464
return getArgOperand(*PtrParamOpt);
465
return nullptr;
466
}
467
468
// Returns the argument index of the pointer operand for memory VP
// intrinsics, or nullopt otherwise. Only IDs with a VP_PROPERTY_MEMOP entry
// in VPIntrinsics.def produce a position.
std::optional<unsigned>
VPIntrinsic::getMemoryPointerParamPos(Intrinsic::ID VPID) {
  switch (VPID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_MEMOP(POINTERPOS, ...) return POINTERPOS;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return std::nullopt;
}
480
481
/// \return The data (payload) operand of this store or scatter.
482
Value *VPIntrinsic::getMemoryDataParam() const {
483
auto DataParamOpt = getMemoryDataParamPos(getIntrinsicID());
484
if (!DataParamOpt)
485
return nullptr;
486
return getArgOperand(*DataParamOpt);
487
}
488
489
// Returns the argument index of the stored data operand for store/scatter VP
// intrinsics, or nullopt otherwise (VP_PROPERTY_MEMOP's second field).
std::optional<unsigned> VPIntrinsic::getMemoryDataParamPos(Intrinsic::ID VPID) {
  switch (VPID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_MEMOP(POINTERPOS, DATAPOS) return DATAPOS;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return std::nullopt;
}
500
501
// File-local constexpr check for VP membership, usable in the static_asserts
// below; the public VPIntrinsic::isVPIntrinsic forwards here.
constexpr bool isVPIntrinsic(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, MASKPOS, VLENPOS)                    \
  case Intrinsic::VPID:                                                        \
    return true;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}
512
513
// Public entry point; the implementation is the file-local constexpr above.
bool VPIntrinsic::isVPIntrinsic(Intrinsic::ID ID) {
  return ::isVPIntrinsic(ID);
}
516
517
// Equivalent non-predicated opcode, e.g. vp.add -> Instruction::Add.
// constexpr so the static_asserts below can evaluate it at compile time.
constexpr static std::optional<unsigned>
getFunctionalOpcodeForVP(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_FUNCTIONAL_OPC(OPC) return Instruction::OPC;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return std::nullopt;
}
530
531
// Public wrapper around the file-local constexpr table lookup.
std::optional<unsigned>
VPIntrinsic::getFunctionalOpcodeForVP(Intrinsic::ID ID) {
  return ::getFunctionalOpcodeForVP(ID);
}
535
536
// Equivalent non-predicated intrinsic ID, e.g. vp.smax -> Intrinsic::smax.
// constexpr so the static_asserts below can evaluate it at compile time.
constexpr static std::optional<Intrinsic::ID>
getFunctionalIntrinsicIDForVP(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_FUNCTIONAL_INTRINSIC(INTRIN) return Intrinsic::INTRIN;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return std::nullopt;
}
549
550
// Public wrapper around the file-local constexpr table lookup.
std::optional<Intrinsic::ID>
VPIntrinsic::getFunctionalIntrinsicIDForVP(Intrinsic::ID ID) {
  return ::getFunctionalIntrinsicIDForVP(ID);
}
554
555
// True for VP intrinsics explicitly marked (VP_PROPERTY_NO_FUNCTIONAL) as
// having no non-predicated equivalent; used by the static_asserts below.
constexpr static bool doesVPHaveNoFunctionalEquivalent(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_NO_FUNCTIONAL return true;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}
566
567
// All VP intrinsics should have an equivalent non-VP opcode or intrinsic
// defined, or be marked that they don't have one. Enforced at compile time:
// this expands one static_assert per registered VP intrinsic using the
// constexpr helpers above.
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...)                                 \
  static_assert(doesVPHaveNoFunctionalEquivalent(Intrinsic::VPID) ||           \
                getFunctionalOpcodeForVP(Intrinsic::VPID) ||                   \
                getFunctionalIntrinsicIDForVP(Intrinsic::VPID));
#include "llvm/IR/VPIntrinsics.def"
574
575
// Equivalent non-predicated constrained intrinsic, e.g. vp.fadd ->
// experimental.constrained.fadd (from VP_PROPERTY_CONSTRAINEDFP entries).
std::optional<Intrinsic::ID>
VPIntrinsic::getConstrainedIntrinsicIDForVP(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_CONSTRAINEDFP(HASRND, HASEXCEPT, CID) return Intrinsic::CID;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return std::nullopt;
}
588
589
// Inverse lookup: maps an IR opcode to its VP counterpart, or not_intrinsic.
// The macro expansion is inverted relative to the other tables: the opcode
// becomes the case label and the enclosing VPID the return value.
Intrinsic::ID VPIntrinsic::getForOpcode(unsigned IROPC) {
  switch (IROPC) {
  default:
    break;

#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) break;
#define VP_PROPERTY_FUNCTIONAL_OPC(OPC) case Instruction::OPC:
#define END_REGISTER_VP_INTRINSIC(VPID) return Intrinsic::VPID;
#include "llvm/IR/VPIntrinsics.def"
  }
  return Intrinsic::not_intrinsic;
}
601
602
// Maps a (possibly already predicated) intrinsic ID to its VP counterpart:
// VP IDs map to themselves, functional equivalents map to their VP form, and
// anything else yields not_intrinsic.
constexpr static Intrinsic::ID getForIntrinsic(Intrinsic::ID Id) {
  if (::isVPIntrinsic(Id))
    return Id;

  switch (Id) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) break;
#define VP_PROPERTY_FUNCTIONAL_INTRINSIC(INTRIN) case Intrinsic::INTRIN:
#define END_REGISTER_VP_INTRINSIC(VPID) return Intrinsic::VPID;
#include "llvm/IR/VPIntrinsics.def"
  }
  return Intrinsic::not_intrinsic;
}
616
617
// Public wrapper around the file-local constexpr lookup.
Intrinsic::ID VPIntrinsic::getForIntrinsic(Intrinsic::ID Id) {
  return ::getForIntrinsic(Id);
}
620
621
// Returns true when the EVL operand provably covers all vector lanes, so it
// can be ignored (treated as "all lanes enabled") by optimizations.
bool VPIntrinsic::canIgnoreVectorLengthParam() const {
  using namespace PatternMatch;

  ElementCount EC = getStaticVectorLength();

  // No vlen param - no lanes masked-off by it.
  auto *VLParam = getVectorLengthParam();
  if (!VLParam)
    return true;

  // Note that the VP intrinsic causes undefined behavior if the Explicit Vector
  // Length parameter is strictly greater-than the number of vector elements of
  // the operation. This function returns true when this is detected statically
  // in the IR.

  // Check whether "W == vscale * EC.getKnownMinValue()"
  if (EC.isScalable()) {
    // Compare vscale patterns
    uint64_t VScaleFactor;
    if (match(VLParam, m_Mul(m_VScale(), m_ConstantInt(VScaleFactor))))
      return VScaleFactor >= EC.getKnownMinValue();
    // EVL == vscale covers everything only when the min element count is 1.
    return (EC.getKnownMinValue() == 1) && match(VLParam, m_VScale());
  }

  // standard SIMD operation
  const auto *VLConst = dyn_cast<ConstantInt>(VLParam);
  if (!VLConst)
    return false;

  uint64_t VLNum = VLConst->getZExtValue();
  if (VLNum >= EC.getKnownMinValue())
    return true;

  return false;
}
656
657
// Declares (or retrieves) the Function for the VP intrinsic VPID in module M,
// choosing the overload types from the supplied return type and parameter
// values. The switch encodes, per intrinsic family, which operand/return
// types participate in the overload signature.
Function *VPIntrinsic::getDeclarationForParams(Module *M, Intrinsic::ID VPID,
                                               Type *ReturnType,
                                               ArrayRef<Value *> Params) {
  assert(isVPIntrinsic(VPID) && "not a VP intrinsic");
  Function *VPFunc;
  switch (VPID) {
  default: {
    // Common case: overloaded on the first data operand (or, for reductions,
    // on the vector operand).
    Type *OverloadTy = Params[0]->getType();
    if (VPReductionIntrinsic::isVPReduction(VPID))
      OverloadTy =
          Params[*VPReductionIntrinsic::getVectorParamPos(VPID)]->getType();

    VPFunc = Intrinsic::getDeclaration(M, VPID, OverloadTy);
    break;
  }
  // Casts: overloaded on both destination (return) and source types.
  case Intrinsic::vp_trunc:
  case Intrinsic::vp_sext:
  case Intrinsic::vp_zext:
  case Intrinsic::vp_fptoui:
  case Intrinsic::vp_fptosi:
  case Intrinsic::vp_uitofp:
  case Intrinsic::vp_sitofp:
  case Intrinsic::vp_fptrunc:
  case Intrinsic::vp_fpext:
  case Intrinsic::vp_ptrtoint:
  case Intrinsic::vp_inttoptr:
  case Intrinsic::vp_lrint:
  case Intrinsic::vp_llrint:
  case Intrinsic::vp_cttz_elts:
    VPFunc =
        Intrinsic::getDeclaration(M, VPID, {ReturnType, Params[0]->getType()});
    break;
  case Intrinsic::vp_is_fpclass:
    VPFunc = Intrinsic::getDeclaration(M, VPID, {Params[0]->getType()});
    break;
  // merge/select: overloaded on the value operand (operand 1), not the mask.
  case Intrinsic::vp_merge:
  case Intrinsic::vp_select:
    VPFunc = Intrinsic::getDeclaration(M, VPID, {Params[1]->getType()});
    break;
  case Intrinsic::vp_load:
    VPFunc = Intrinsic::getDeclaration(
        M, VPID, {ReturnType, Params[0]->getType()});
    break;
  case Intrinsic::experimental_vp_strided_load:
    // Also overloaded on the stride type.
    VPFunc = Intrinsic::getDeclaration(
        M, VPID, {ReturnType, Params[0]->getType(), Params[1]->getType()});
    break;
  case Intrinsic::vp_gather:
    VPFunc = Intrinsic::getDeclaration(
        M, VPID, {ReturnType, Params[0]->getType()});
    break;
  case Intrinsic::vp_store:
    VPFunc = Intrinsic::getDeclaration(
        M, VPID, {Params[0]->getType(), Params[1]->getType()});
    break;
  case Intrinsic::experimental_vp_strided_store:
    VPFunc = Intrinsic::getDeclaration(
        M, VPID,
        {Params[0]->getType(), Params[1]->getType(), Params[2]->getType()});
    break;
  case Intrinsic::vp_scatter:
    VPFunc = Intrinsic::getDeclaration(
        M, VPID, {Params[0]->getType(), Params[1]->getType()});
    break;
  case Intrinsic::experimental_vp_splat:
    VPFunc = Intrinsic::getDeclaration(M, VPID, ReturnType);
    break;
  }
  assert(VPFunc && "Could not declare VP intrinsic");
  return VPFunc;
}
728
729
// True for VP intrinsics registered with VP_PROPERTY_REDUCTION.
bool VPReductionIntrinsic::isVPReduction(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_REDUCTION(STARTPOS, ...) return true;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}
740
741
// True for VP intrinsics registered with VP_PROPERTY_CASTOP.
bool VPCastIntrinsic::isVPCast(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_CASTOP return true;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}
752
753
// True for VP intrinsics registered with VP_PROPERTY_CMP.
bool VPCmpIntrinsic::isVPCmp(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_CMP(CCPOS, ...) return true;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}
764
765
// True for VP intrinsics registered with VP_PROPERTY_BINARYOP.
bool VPBinOpIntrinsic::isVPBinOp(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_BINARYOP return true;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}
776
777
// Maps a predicate-name MDString operand ("eq", "slt", ...) to the
// corresponding ICmp predicate; BAD_ICMP_PREDICATE on anything unrecognized.
static ICmpInst::Predicate getIntPredicateFromMD(const Value *Op) {
  Metadata *MD = cast<MetadataAsValue>(Op)->getMetadata();
  if (!MD || !isa<MDString>(MD))
    return ICmpInst::BAD_ICMP_PREDICATE;
  return StringSwitch<ICmpInst::Predicate>(cast<MDString>(MD)->getString())
      .Case("eq", ICmpInst::ICMP_EQ)
      .Case("ne", ICmpInst::ICMP_NE)
      .Case("ugt", ICmpInst::ICMP_UGT)
      .Case("uge", ICmpInst::ICMP_UGE)
      .Case("ult", ICmpInst::ICMP_ULT)
      .Case("ule", ICmpInst::ICMP_ULE)
      .Case("sgt", ICmpInst::ICMP_SGT)
      .Case("sge", ICmpInst::ICMP_SGE)
      .Case("slt", ICmpInst::ICMP_SLT)
      .Case("sle", ICmpInst::ICMP_SLE)
      .Default(ICmpInst::BAD_ICMP_PREDICATE);
}
794
795
// Returns this VP comparison's predicate. The predicate operand position and
// FP-vs-integer flavor come from the VP_PROPERTY_CMP table entries; the
// metadata operand is then decoded by the matching helper above.
CmpInst::Predicate VPCmpIntrinsic::getPredicate() const {
  bool IsFP = true;
  std::optional<unsigned> CCArgIdx;
  switch (getIntrinsicID()) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_CMP(CCPOS, ISFP)                                           \
  CCArgIdx = CCPOS;                                                            \
  IsFP = ISFP;                                                                 \
  break;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  assert(CCArgIdx && "Unexpected vector-predicated comparison");
  return IsFP ? getFPPredicateFromMD(getArgOperand(*CCArgIdx))
              : getIntPredicateFromMD(getArgOperand(*CCArgIdx));
}
813
814
// Instance accessor; valid because this object is known to be a reduction,
// so the static lookup always yields a value.
unsigned VPReductionIntrinsic::getVectorParamPos() const {
  return *VPReductionIntrinsic::getVectorParamPos(getIntrinsicID());
}
817
818
// Instance accessor; valid because this object is known to be a reduction,
// so the static lookup always yields a value.
unsigned VPReductionIntrinsic::getStartParamPos() const {
  return *VPReductionIntrinsic::getStartParamPos(getIntrinsicID());
}
821
822
// Returns the vector-operand index of a VP reduction, or nullopt for
// non-reduction IDs (VP_PROPERTY_REDUCTION's second field).
std::optional<unsigned>
VPReductionIntrinsic::getVectorParamPos(Intrinsic::ID ID) {
  switch (ID) {
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_REDUCTION(STARTPOS, VECTORPOS) return VECTORPOS;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  default:
    break;
  }
  return std::nullopt;
}
834
835
// Returns the start-value operand index of a VP reduction, or nullopt for
// non-reduction IDs (VP_PROPERTY_REDUCTION's first field).
std::optional<unsigned>
VPReductionIntrinsic::getStartParamPos(Intrinsic::ID ID) {
  switch (ID) {
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_REDUCTION(STARTPOS, VECTORPOS) return STARTPOS;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  default:
    break;
  }
  return std::nullopt;
}
847
848
// Maps this overflow/saturating arithmetic intrinsic to the plain IR binary
// opcode it corresponds to (Add/Sub/Mul).
Instruction::BinaryOps BinaryOpIntrinsic::getBinaryOp() const {
  switch (getIntrinsicID()) {
  case Intrinsic::uadd_with_overflow:
  case Intrinsic::sadd_with_overflow:
  case Intrinsic::uadd_sat:
  case Intrinsic::sadd_sat:
    return Instruction::Add;
  case Intrinsic::usub_with_overflow:
  case Intrinsic::ssub_with_overflow:
  case Intrinsic::usub_sat:
  case Intrinsic::ssub_sat:
    return Instruction::Sub;
  case Intrinsic::umul_with_overflow:
  case Intrinsic::smul_with_overflow:
    return Instruction::Mul;
  default:
    llvm_unreachable("Invalid intrinsic");
  }
}
867
868
// True for the signed variants (sadd/ssub/smul overflow, sadd/ssub sat).
bool BinaryOpIntrinsic::isSigned() const {
  switch (getIntrinsicID()) {
  case Intrinsic::sadd_with_overflow:
  case Intrinsic::ssub_with_overflow:
  case Intrinsic::smul_with_overflow:
  case Intrinsic::sadd_sat:
  case Intrinsic::ssub_sat:
    return true;
  default:
    return false;
  }
}
880
881
unsigned BinaryOpIntrinsic::getNoWrapKind() const {
882
if (isSigned())
883
return OverflowingBinaryOperator::NoSignedWrap;
884
else
885
return OverflowingBinaryOperator::NoUnsignedWrap;
886
}
887
888
// Returns the statepoint this projection (gc.relocate / gc.result) refers
// to. Undef and token-none operands propagate as undef; for an invoke's
// exceptional path, the statepoint is recovered from the landing pad's
// unique predecessor's terminator.
const Value *GCProjectionInst::getStatepoint() const {
  const Value *Token = getArgOperand(0);
  if (isa<UndefValue>(Token))
    return Token;

  // Treat none token as if it was undef here
  if (isa<ConstantTokenNone>(Token))
    return UndefValue::get(Token->getType());

  // This takes care both of relocates for call statepoints and relocates
  // on normal path of invoke statepoint.
  if (!isa<LandingPadInst>(Token))
    return cast<GCStatepointInst>(Token);

  // This relocate is on exceptional path of an invoke statepoint
  const BasicBlock *InvokeBB =
      cast<Instruction>(Token)->getParent()->getUniquePredecessor();

  assert(InvokeBB && "safepoints should have unique landingpads");
  assert(InvokeBB->getTerminator() &&
         "safepoint block should be well formed");

  return cast<GCStatepointInst>(InvokeBB->getTerminator());
}
912
913
// Returns the base pointer being relocated: looked up in the statepoint's
// "gc-live" operand bundle when present, otherwise in its argument list.
// Propagates undef if the statepoint itself is undef.
Value *GCRelocateInst::getBasePtr() const {
  auto Statepoint = getStatepoint();
  if (isa<UndefValue>(Statepoint))
    return UndefValue::get(Statepoint->getType());

  auto *GCInst = cast<GCStatepointInst>(Statepoint);
  if (auto Opt = GCInst->getOperandBundle(LLVMContext::OB_gc_live))
    return *(Opt->Inputs.begin() + getBasePtrIndex());
  return *(GCInst->arg_begin() + getBasePtrIndex());
}
923
924
// Returns the derived pointer being relocated; same lookup strategy as
// getBasePtr but using the derived-pointer index.
Value *GCRelocateInst::getDerivedPtr() const {
  auto *Statepoint = getStatepoint();
  if (isa<UndefValue>(Statepoint))
    return UndefValue::get(Statepoint->getType());

  auto *GCInst = cast<GCStatepointInst>(Statepoint);
  if (auto Opt = GCInst->getOperandBundle(LLVMContext::OB_gc_live))
    return *(Opt->Inputs.begin() + getDerivedPtrIndex());
  return *(GCInst->arg_begin() + getDerivedPtrIndex());
}
934
935