SemaRISCV.cpp
//===------ SemaRISCV.cpp ------- RISC-V target-specific routines ---------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements semantic analysis functions specific to RISC-V.
//
//===----------------------------------------------------------------------===//

#include "clang/Sema/SemaRISCV.h"
#include "clang/AST/Decl.h"
#include "clang/Basic/TargetInfo.h"
#include "clang/Lex/Preprocessor.h"
#include "clang/Sema/Attr.h"
#include "clang/Sema/Lookup.h"
#include "clang/Sema/RISCVIntrinsicManager.h"
#include "clang/Sema/Sema.h"
#include "clang/Support/RISCVVIntrinsicUtils.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/TargetParser/RISCVISAInfo.h"
#include "llvm/TargetParser/RISCVTargetParser.h"
#include <optional>
#include <string>
#include <vector>

using namespace llvm;
using namespace clang;
using namespace clang::RISCV;

using IntrinsicKind = sema::RISCVIntrinsicManager::IntrinsicKind;
namespace {

// Function definition of an RVV intrinsic.
struct RVVIntrinsicDef {
  /// Mapping to which clang built-in function, e.g. __builtin_rvv_vadd.
  std::string BuiltinName;

  /// Function signature, first element is return type.
  RVVTypes Signature;
};

struct RVVOverloadIntrinsicDef {
  // Indexes of RISCVIntrinsicManagerImpl::IntrinsicList.
  SmallVector<uint32_t, 1> Indexes;
};

} // namespace

static const PrototypeDescriptor RVVSignatureTable[] = {
#define DECL_SIGNATURE_TABLE
#include "clang/Basic/riscv_vector_builtin_sema.inc"
#undef DECL_SIGNATURE_TABLE
};

static const PrototypeDescriptor RVSiFiveVectorSignatureTable[] = {
#define DECL_SIGNATURE_TABLE
#include "clang/Basic/riscv_sifive_vector_builtin_sema.inc"
#undef DECL_SIGNATURE_TABLE
};

static const RVVIntrinsicRecord RVVIntrinsicRecords[] = {
#define DECL_INTRINSIC_RECORDS
#include "clang/Basic/riscv_vector_builtin_sema.inc"
#undef DECL_INTRINSIC_RECORDS
};

static const RVVIntrinsicRecord RVSiFiveVectorIntrinsicRecords[] = {
#define DECL_INTRINSIC_RECORDS
#include "clang/Basic/riscv_sifive_vector_builtin_sema.inc"
#undef DECL_INTRINSIC_RECORDS
};

// Get a subsequence of the signature table.
static ArrayRef<PrototypeDescriptor>
ProtoSeq2ArrayRef(IntrinsicKind K, uint16_t Index, uint8_t Length) {
  switch (K) {
  case IntrinsicKind::RVV:
    return ArrayRef(&RVVSignatureTable[Index], Length);
  case IntrinsicKind::SIFIVE_VECTOR:
    return ArrayRef(&RVSiFiveVectorSignatureTable[Index], Length);
  }
  llvm_unreachable("Unhandled IntrinsicKind");
}

static QualType RVVType2Qual(ASTContext &Context, const RVVType *Type) {
  QualType QT;
  switch (Type->getScalarType()) {
  case ScalarTypeKind::Void:
    QT = Context.VoidTy;
    break;
  case ScalarTypeKind::Size_t:
    QT = Context.getSizeType();
    break;
  case ScalarTypeKind::Ptrdiff_t:
    QT = Context.getPointerDiffType();
    break;
  case ScalarTypeKind::UnsignedLong:
    QT = Context.UnsignedLongTy;
    break;
  case ScalarTypeKind::SignedLong:
    QT = Context.LongTy;
    break;
  case ScalarTypeKind::Boolean:
    QT = Context.BoolTy;
    break;
  case ScalarTypeKind::SignedInteger:
    QT = Context.getIntTypeForBitwidth(Type->getElementBitwidth(), true);
    break;
  case ScalarTypeKind::UnsignedInteger:
    QT = Context.getIntTypeForBitwidth(Type->getElementBitwidth(), false);
    break;
  case ScalarTypeKind::BFloat:
    QT = Context.BFloat16Ty;
    break;
  case ScalarTypeKind::Float:
    switch (Type->getElementBitwidth()) {
    case 64:
      QT = Context.DoubleTy;
      break;
    case 32:
      QT = Context.FloatTy;
      break;
    case 16:
      QT = Context.Float16Ty;
      break;
    default:
      llvm_unreachable("Unsupported floating point width.");
    }
    break;
  case Invalid:
  case Undefined:
    llvm_unreachable("Unhandled type.");
  }
  if (Type->isVector()) {
    if (Type->isTuple())
      QT = Context.getScalableVectorType(QT, *Type->getScale(), Type->getNF());
    else
      QT = Context.getScalableVectorType(QT, *Type->getScale());
  }

  if (Type->isConstant())
    QT = Context.getConstType(QT);

  // Transform the type to a pointer as the last step, if necessary.
  if (Type->isPointer())
    QT = Context.getPointerType(QT);

  return QT;
}
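
// For illustration (not part of the upstream comments): an RVVType describing
// a signed 32-bit element vector at LMUL=1 (scalar kind SignedInteger,
// element bitwidth 32, scale 2) should map through the switch above to
// roughly
//   Context.getScalableVectorType(Context.IntTy, /*NumElts=*/2)
// i.e. the builtin type behind vint32m1_t.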

namespace {
class RISCVIntrinsicManagerImpl : public sema::RISCVIntrinsicManager {
private:
  Sema &S;
  ASTContext &Context;
  RVVTypeCache TypeCache;
  bool ConstructedRISCVVBuiltins;
  bool ConstructedRISCVSiFiveVectorBuiltins;

  // List of all RVV intrinsics.
  std::vector<RVVIntrinsicDef> IntrinsicList;
  // Mapping function name to index of IntrinsicList.
  StringMap<uint32_t> Intrinsics;
  // Mapping function name to RVVOverloadIntrinsicDef.
  StringMap<RVVOverloadIntrinsicDef> OverloadIntrinsics;

  // Create RVVIntrinsicDef.
  void InitRVVIntrinsic(const RVVIntrinsicRecord &Record, StringRef SuffixStr,
                        StringRef OverloadedSuffixStr, bool IsMask,
                        RVVTypes &Types, bool HasPolicy, Policy PolicyAttrs);

  // Create FunctionDecl for a vector intrinsic.
  void CreateRVVIntrinsicDecl(LookupResult &LR, IdentifierInfo *II,
                              Preprocessor &PP, uint32_t Index,
                              bool IsOverload);

  void ConstructRVVIntrinsics(ArrayRef<RVVIntrinsicRecord> Recs,
                              IntrinsicKind K);

public:
  RISCVIntrinsicManagerImpl(clang::Sema &S) : S(S), Context(S.Context) {
    ConstructedRISCVVBuiltins = false;
    ConstructedRISCVSiFiveVectorBuiltins = false;
  }

  // Initialize IntrinsicList.
  void InitIntrinsicList() override;

  // Create a RISC-V vector intrinsic, insert it into the symbol table, and
  // return true if one was found; otherwise return false.
  bool CreateIntrinsicIfFound(LookupResult &LR, IdentifierInfo *II,
                              Preprocessor &PP) override;
};
} // namespace

void RISCVIntrinsicManagerImpl::ConstructRVVIntrinsics(
    ArrayRef<RVVIntrinsicRecord> Recs, IntrinsicKind K) {
  const TargetInfo &TI = Context.getTargetInfo();
  static const std::pair<const char *, RVVRequire> FeatureCheckList[] = {
      {"64bit", RVV_REQ_RV64},
      {"xsfvcp", RVV_REQ_Xsfvcp},
      {"xsfvfnrclipxfqf", RVV_REQ_Xsfvfnrclipxfqf},
      {"xsfvfwmaccqqq", RVV_REQ_Xsfvfwmaccqqq},
      {"xsfvqmaccdod", RVV_REQ_Xsfvqmaccdod},
      {"xsfvqmaccqoq", RVV_REQ_Xsfvqmaccqoq},
      {"zvbb", RVV_REQ_Zvbb},
      {"zvbc", RVV_REQ_Zvbc},
      {"zvkb", RVV_REQ_Zvkb},
      {"zvkg", RVV_REQ_Zvkg},
      {"zvkned", RVV_REQ_Zvkned},
      {"zvknha", RVV_REQ_Zvknha},
      {"zvknhb", RVV_REQ_Zvknhb},
      {"zvksed", RVV_REQ_Zvksed},
      {"zvksh", RVV_REQ_Zvksh},
      {"zvfbfwma", RVV_REQ_Zvfbfwma},
      {"zvfbfmin", RVV_REQ_Zvfbfmin},
      {"zvfh", RVV_REQ_Zvfh},
      {"experimental", RVV_REQ_Experimental}};

  // Construction of RVVIntrinsicRecords needs to stay in sync with
  // createRVVIntrinsics in RISCVVEmitter.cpp.
  for (auto &Record : Recs) {
    // Check requirements.
    if (llvm::any_of(FeatureCheckList, [&](const auto &Item) {
          return (Record.RequiredExtensions & Item.second) == Item.second &&
                 !TI.hasFeature(Item.first);
        }))
      continue;

    // Create Intrinsics for each type and LMUL.
    BasicType BaseType = BasicType::Unknown;
    ArrayRef<PrototypeDescriptor> BasicProtoSeq =
        ProtoSeq2ArrayRef(K, Record.PrototypeIndex, Record.PrototypeLength);
    ArrayRef<PrototypeDescriptor> SuffixProto =
        ProtoSeq2ArrayRef(K, Record.SuffixIndex, Record.SuffixLength);
    ArrayRef<PrototypeDescriptor> OverloadedSuffixProto = ProtoSeq2ArrayRef(
        K, Record.OverloadedSuffixIndex, Record.OverloadedSuffixSize);

    PolicyScheme UnMaskedPolicyScheme =
        static_cast<PolicyScheme>(Record.UnMaskedPolicyScheme);
    PolicyScheme MaskedPolicyScheme =
        static_cast<PolicyScheme>(Record.MaskedPolicyScheme);

    const Policy DefaultPolicy;

    llvm::SmallVector<PrototypeDescriptor> ProtoSeq =
        RVVIntrinsic::computeBuiltinTypes(
            BasicProtoSeq, /*IsMasked=*/false,
            /*HasMaskedOffOperand=*/false, Record.HasVL, Record.NF,
            UnMaskedPolicyScheme, DefaultPolicy, Record.IsTuple);

    llvm::SmallVector<PrototypeDescriptor> ProtoMaskSeq;
    if (Record.HasMasked)
      ProtoMaskSeq = RVVIntrinsic::computeBuiltinTypes(
          BasicProtoSeq, /*IsMasked=*/true, Record.HasMaskedOffOperand,
          Record.HasVL, Record.NF, MaskedPolicyScheme, DefaultPolicy,
          Record.IsTuple);

    bool UnMaskedHasPolicy = UnMaskedPolicyScheme != PolicyScheme::SchemeNone;
    bool MaskedHasPolicy = MaskedPolicyScheme != PolicyScheme::SchemeNone;
    SmallVector<Policy> SupportedUnMaskedPolicies =
        RVVIntrinsic::getSupportedUnMaskedPolicies();
    SmallVector<Policy> SupportedMaskedPolicies =
        RVVIntrinsic::getSupportedMaskedPolicies(Record.HasTailPolicy,
                                                 Record.HasMaskPolicy);

    for (unsigned int TypeRangeMaskShift = 0;
         TypeRangeMaskShift <= static_cast<unsigned int>(BasicType::MaxOffset);
         ++TypeRangeMaskShift) {
      unsigned int BaseTypeI = 1 << TypeRangeMaskShift;
      BaseType = static_cast<BasicType>(BaseTypeI);

      if ((BaseTypeI & Record.TypeRangeMask) != BaseTypeI)
        continue;

      // Expand for each supported LMUL.
      for (int Log2LMUL = -3; Log2LMUL <= 3; Log2LMUL++) {
        if (!(Record.Log2LMULMask & (1 << (Log2LMUL + 3))))
          continue;

        std::optional<RVVTypes> Types =
            TypeCache.computeTypes(BaseType, Log2LMUL, Record.NF, ProtoSeq);

        // Skip creating a new intrinsic if there are any illegal types.
        if (!Types.has_value())
          continue;

        std::string SuffixStr = RVVIntrinsic::getSuffixStr(
            TypeCache, BaseType, Log2LMUL, SuffixProto);
        std::string OverloadedSuffixStr = RVVIntrinsic::getSuffixStr(
            TypeCache, BaseType, Log2LMUL, OverloadedSuffixProto);

        // Create non-masked intrinsic.
        InitRVVIntrinsic(Record, SuffixStr, OverloadedSuffixStr, false, *Types,
                         UnMaskedHasPolicy, DefaultPolicy);

        // Create non-masked policy intrinsic.
        if (Record.UnMaskedPolicyScheme != PolicyScheme::SchemeNone) {
          for (auto P : SupportedUnMaskedPolicies) {
            SmallVector<PrototypeDescriptor> PolicyPrototype =
                RVVIntrinsic::computeBuiltinTypes(
                    BasicProtoSeq, /*IsMasked=*/false,
                    /*HasMaskedOffOperand=*/false, Record.HasVL, Record.NF,
                    UnMaskedPolicyScheme, P, Record.IsTuple);
            std::optional<RVVTypes> PolicyTypes = TypeCache.computeTypes(
                BaseType, Log2LMUL, Record.NF, PolicyPrototype);
            InitRVVIntrinsic(Record, SuffixStr, OverloadedSuffixStr,
                             /*IsMask=*/false, *PolicyTypes, UnMaskedHasPolicy,
                             P);
          }
        }
        if (!Record.HasMasked)
          continue;
        // Create masked intrinsic.
        std::optional<RVVTypes> MaskTypes =
            TypeCache.computeTypes(BaseType, Log2LMUL, Record.NF, ProtoMaskSeq);
        InitRVVIntrinsic(Record, SuffixStr, OverloadedSuffixStr, true,
                         *MaskTypes, MaskedHasPolicy, DefaultPolicy);
        if (Record.MaskedPolicyScheme == PolicyScheme::SchemeNone)
          continue;
        // Create masked policy intrinsic.
        for (auto P : SupportedMaskedPolicies) {
          SmallVector<PrototypeDescriptor> PolicyPrototype =
              RVVIntrinsic::computeBuiltinTypes(
                  BasicProtoSeq, /*IsMasked=*/true, Record.HasMaskedOffOperand,
                  Record.HasVL, Record.NF, MaskedPolicyScheme, P,
                  Record.IsTuple);
          std::optional<RVVTypes> PolicyTypes = TypeCache.computeTypes(
              BaseType, Log2LMUL, Record.NF, PolicyPrototype);
          InitRVVIntrinsic(Record, SuffixStr, OverloadedSuffixStr,
                           /*IsMask=*/true, *PolicyTypes, MaskedHasPolicy, P);
        }
      } // End for different LMUL
    } // End for different TypeRange
  }
}
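
// Rough picture of the expansion above (illustrative): a single record such
// as "vadd" with an integer TypeRangeMask and all LMULs enabled yields one
// RVVIntrinsicDef per combination, e.g. vadd_vv_i32m1 and its masked form
// vadd_vv_i32m1_m, plus policy variants such as vadd_vv_i32m1_tu and
// vadd_vv_i32m1_tumu, all registered under the overloaded name "vadd".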

void RISCVIntrinsicManagerImpl::InitIntrinsicList() {

  if (S.RISCV().DeclareRVVBuiltins && !ConstructedRISCVVBuiltins) {
    ConstructedRISCVVBuiltins = true;
    ConstructRVVIntrinsics(RVVIntrinsicRecords, IntrinsicKind::RVV);
  }
  if (S.RISCV().DeclareSiFiveVectorBuiltins &&
      !ConstructedRISCVSiFiveVectorBuiltins) {
    ConstructedRISCVSiFiveVectorBuiltins = true;
    ConstructRVVIntrinsics(RVSiFiveVectorIntrinsicRecords,
                           IntrinsicKind::SIFIVE_VECTOR);
  }
}

// Compute name and signatures for intrinsic with practical types.
void RISCVIntrinsicManagerImpl::InitRVVIntrinsic(
    const RVVIntrinsicRecord &Record, StringRef SuffixStr,
    StringRef OverloadedSuffixStr, bool IsMasked, RVVTypes &Signature,
    bool HasPolicy, Policy PolicyAttrs) {
  // Function name, e.g. vadd_vv_i32m1.
  std::string Name = Record.Name;
  if (!SuffixStr.empty())
    Name += "_" + SuffixStr.str();

  // Overloaded function name, e.g. vadd.
  std::string OverloadedName;
  if (!Record.OverloadedName)
    OverloadedName = StringRef(Record.Name).split("_").first.str();
  else
    OverloadedName = Record.OverloadedName;
  if (!OverloadedSuffixStr.empty())
    OverloadedName += "_" + OverloadedSuffixStr.str();

  // clang built-in function name, e.g. __builtin_rvv_vadd.
  std::string BuiltinName = std::string(Record.Name);

  RVVIntrinsic::updateNamesAndPolicy(IsMasked, HasPolicy, Name, BuiltinName,
                                     OverloadedName, PolicyAttrs,
                                     Record.HasFRMRoundModeOp);

  // Put into IntrinsicList.
  uint32_t Index = IntrinsicList.size();
  assert(IntrinsicList.size() == (size_t)Index &&
         "Intrinsics indices overflow.");
  IntrinsicList.push_back({BuiltinName, Signature});

  // Create mapping to Intrinsics.
  Intrinsics.insert({Name, Index});

  // Get the RVVOverloadIntrinsicDef.
  RVVOverloadIntrinsicDef &OverloadIntrinsicDef =
      OverloadIntrinsics[OverloadedName];

  // And add the index.
  OverloadIntrinsicDef.Indexes.push_back(Index);
}

void RISCVIntrinsicManagerImpl::CreateRVVIntrinsicDecl(LookupResult &LR,
                                                       IdentifierInfo *II,
                                                       Preprocessor &PP,
                                                       uint32_t Index,
                                                       bool IsOverload) {
  ASTContext &Context = S.Context;
  RVVIntrinsicDef &IDef = IntrinsicList[Index];
  RVVTypes Sigs = IDef.Signature;
  size_t SigLength = Sigs.size();
  RVVType *ReturnType = Sigs[0];
  QualType RetType = RVVType2Qual(Context, ReturnType);
  SmallVector<QualType, 8> ArgTypes;
  QualType BuiltinFuncType;

  // Skip return type, and convert RVVType to QualType for arguments.
  for (size_t i = 1; i < SigLength; ++i)
    ArgTypes.push_back(RVVType2Qual(Context, Sigs[i]));

  FunctionProtoType::ExtProtoInfo PI(
      Context.getDefaultCallingConvention(false, false, true));

  PI.Variadic = false;

  SourceLocation Loc = LR.getNameLoc();
  BuiltinFuncType = Context.getFunctionType(RetType, ArgTypes, PI);
  DeclContext *Parent = Context.getTranslationUnitDecl();

  FunctionDecl *RVVIntrinsicDecl = FunctionDecl::Create(
      Context, Parent, Loc, Loc, II, BuiltinFuncType, /*TInfo=*/nullptr,
      SC_Extern, S.getCurFPFeatures().isFPConstrained(),
      /*isInlineSpecified*/ false,
      /*hasWrittenPrototype*/ true);

  // Create Decl objects for each parameter, adding them to the
  // FunctionDecl.
  const auto *FP = cast<FunctionProtoType>(BuiltinFuncType);
  SmallVector<ParmVarDecl *, 8> ParmList;
  for (unsigned IParm = 0, E = FP->getNumParams(); IParm != E; ++IParm) {
    ParmVarDecl *Parm =
        ParmVarDecl::Create(Context, RVVIntrinsicDecl, Loc, Loc, nullptr,
                            FP->getParamType(IParm), nullptr, SC_None, nullptr);
    Parm->setScopeInfo(0, IParm);
    ParmList.push_back(Parm);
  }
  RVVIntrinsicDecl->setParams(ParmList);

  // Add function attributes.
  if (IsOverload)
    RVVIntrinsicDecl->addAttr(OverloadableAttr::CreateImplicit(Context));

  // Set up alias to __builtin_rvv_*.
  IdentifierInfo &IntrinsicII =
      PP.getIdentifierTable().get("__builtin_rvv_" + IDef.BuiltinName);
  RVVIntrinsicDecl->addAttr(
      BuiltinAliasAttr::CreateImplicit(S.Context, &IntrinsicII));

  // Add to symbol table.
  LR.addDecl(RVVIntrinsicDecl);
}
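
// Illustrative net effect: for an entry whose BuiltinName is "vadd" and whose
// signature is (vint32m1_t, vint32m1_t, size_t) -> vint32m1_t, looking up
// "__riscv_vadd_vv_i32m1" produces a declaration that behaves roughly as if
// the user had written
//   vint32m1_t __riscv_vadd_vv_i32m1(vint32m1_t, vint32m1_t, size_t)
//       __attribute__((clang_builtin_alias(__builtin_rvv_vadd)));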

bool RISCVIntrinsicManagerImpl::CreateIntrinsicIfFound(LookupResult &LR,
                                                       IdentifierInfo *II,
                                                       Preprocessor &PP) {
  StringRef Name = II->getName();
  if (!Name.consume_front("__riscv_"))
    return false;

  // Lookup the function name from the overload intrinsics first.
  auto OvIItr = OverloadIntrinsics.find(Name);
  if (OvIItr != OverloadIntrinsics.end()) {
    const RVVOverloadIntrinsicDef &OvIntrinsicDef = OvIItr->second;
    for (auto Index : OvIntrinsicDef.Indexes)
      CreateRVVIntrinsicDecl(LR, II, PP, Index,
                             /*IsOverload*/ true);

    // If we added overloads, need to resolve the lookup result.
    LR.resolveKind();
    return true;
  }

  // Lookup the function name from the intrinsics.
  auto Itr = Intrinsics.find(Name);
  if (Itr != Intrinsics.end()) {
    CreateRVVIntrinsicDecl(LR, II, PP, Itr->second,
                           /*IsOverload*/ false);
    return true;
  }

  // It's not an RVV intrinsic.
  return false;
}
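
// Example of the lookup flow (illustrative): a call to __riscv_vadd(va, vb,
// vl) has its "__riscv_" prefix stripped, "vadd" is found in
// OverloadIntrinsics, and every indexed signature is declared as an
// overloadable function so normal overload resolution picks the matching one;
// __riscv_vadd_vv_i32m1 instead hits the exact-name Intrinsics map and gets a
// single, non-overloaded declaration.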

namespace clang {
std::unique_ptr<clang::sema::RISCVIntrinsicManager>
CreateRISCVIntrinsicManager(Sema &S) {
  return std::make_unique<RISCVIntrinsicManagerImpl>(S);
}

bool SemaRISCV::CheckLMUL(CallExpr *TheCall, unsigned ArgNum) {
  llvm::APSInt Result;

  // We can't check the value of a dependent argument.
  Expr *Arg = TheCall->getArg(ArgNum);
  if (Arg->isTypeDependent() || Arg->isValueDependent())
    return false;

  // Check constant-ness first.
  if (SemaRef.BuiltinConstantArg(TheCall, ArgNum, Result))
    return true;

  int64_t Val = Result.getSExtValue();
  if ((Val >= 0 && Val <= 3) || (Val >= 5 && Val <= 7))
    return false;

  return Diag(TheCall->getBeginLoc(), diag::err_riscv_builtin_invalid_lmul)
         << Arg->getSourceRange();
}
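
// The accepted values mirror the vtype.vlmul encoding: 0..3 select LMUL
// m1/m2/m4/m8 and 5..7 select mf8/mf4/mf2, while 4 is reserved. For example,
// a vsetvli builtin call whose LMUL operand is the constant 4 is rejected
// here, whereas 0 (m1) or 7 (mf2) passes.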

static bool CheckInvalidVLENandLMUL(const TargetInfo &TI, CallExpr *TheCall,
                                    Sema &S, QualType Type, int EGW) {
  assert((EGW == 128 || EGW == 256) && "EGW can only be 128 or 256 bits");

  // LMUL * VLEN >= EGW
  ASTContext::BuiltinVectorTypeInfo Info =
      S.Context.getBuiltinVectorTypeInfo(Type->castAs<BuiltinType>());
  unsigned ElemSize = S.Context.getTypeSize(Info.ElementType);
  unsigned MinElemCount = Info.EC.getKnownMinValue();

  unsigned EGS = EGW / ElemSize;
  // If EGS is less than or equal to the minimum number of elements, then the
  // type is valid.
  if (EGS <= MinElemCount)
    return false;

  // Otherwise, we need vscale to be at least EGS / MinElemCount.
  assert(EGS % MinElemCount == 0);
  unsigned VScaleFactor = EGS / MinElemCount;
  // Vscale is VLEN/RVVBitsPerBlock.
  unsigned MinRequiredVLEN = VScaleFactor * llvm::RISCV::RVVBitsPerBlock;
  std::string RequiredExt = "zvl" + std::to_string(MinRequiredVLEN) + "b";
  if (!TI.hasFeature(RequiredExt))
    return S.Diag(TheCall->getBeginLoc(),
                  diag::err_riscv_type_requires_extension)
           << Type << RequiredExt;

  return false;
}
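
// Worked example: for a vuint32mf2_t operand checked against EGW=128,
// ElemSize is 32 and MinElemCount is 1, so EGS = 128 / 32 = 4 > 1. That gives
// VScaleFactor = 4 and MinRequiredVLEN = 4 * 64 = 256, i.e. the call is only
// accepted when the target provides the zvl256b feature.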

bool SemaRISCV::CheckBuiltinFunctionCall(const TargetInfo &TI,
                                         unsigned BuiltinID,
                                         CallExpr *TheCall) {
  ASTContext &Context = getASTContext();
  // vmulh.vv, vmulh.vx, vmulhu.vv, vmulhu.vx, vmulhsu.vv, vmulhsu.vx,
  // vsmul.vv, vsmul.vx are not included for EEW=64 in Zve64*.
  switch (BuiltinID) {
  default:
    break;
  case RISCVVector::BI__builtin_rvv_vmulhsu_vv:
  case RISCVVector::BI__builtin_rvv_vmulhsu_vx:
  case RISCVVector::BI__builtin_rvv_vmulhsu_vv_tu:
  case RISCVVector::BI__builtin_rvv_vmulhsu_vx_tu:
  case RISCVVector::BI__builtin_rvv_vmulhsu_vv_m:
  case RISCVVector::BI__builtin_rvv_vmulhsu_vx_m:
  case RISCVVector::BI__builtin_rvv_vmulhsu_vv_mu:
  case RISCVVector::BI__builtin_rvv_vmulhsu_vx_mu:
  case RISCVVector::BI__builtin_rvv_vmulhsu_vv_tum:
  case RISCVVector::BI__builtin_rvv_vmulhsu_vx_tum:
  case RISCVVector::BI__builtin_rvv_vmulhsu_vv_tumu:
  case RISCVVector::BI__builtin_rvv_vmulhsu_vx_tumu:
  case RISCVVector::BI__builtin_rvv_vmulhu_vv:
  case RISCVVector::BI__builtin_rvv_vmulhu_vx:
  case RISCVVector::BI__builtin_rvv_vmulhu_vv_tu:
  case RISCVVector::BI__builtin_rvv_vmulhu_vx_tu:
  case RISCVVector::BI__builtin_rvv_vmulhu_vv_m:
  case RISCVVector::BI__builtin_rvv_vmulhu_vx_m:
  case RISCVVector::BI__builtin_rvv_vmulhu_vv_mu:
  case RISCVVector::BI__builtin_rvv_vmulhu_vx_mu:
  case RISCVVector::BI__builtin_rvv_vmulhu_vv_tum:
  case RISCVVector::BI__builtin_rvv_vmulhu_vx_tum:
  case RISCVVector::BI__builtin_rvv_vmulhu_vv_tumu:
  case RISCVVector::BI__builtin_rvv_vmulhu_vx_tumu:
  case RISCVVector::BI__builtin_rvv_vmulh_vv:
  case RISCVVector::BI__builtin_rvv_vmulh_vx:
  case RISCVVector::BI__builtin_rvv_vmulh_vv_tu:
  case RISCVVector::BI__builtin_rvv_vmulh_vx_tu:
  case RISCVVector::BI__builtin_rvv_vmulh_vv_m:
  case RISCVVector::BI__builtin_rvv_vmulh_vx_m:
  case RISCVVector::BI__builtin_rvv_vmulh_vv_mu:
  case RISCVVector::BI__builtin_rvv_vmulh_vx_mu:
  case RISCVVector::BI__builtin_rvv_vmulh_vv_tum:
  case RISCVVector::BI__builtin_rvv_vmulh_vx_tum:
  case RISCVVector::BI__builtin_rvv_vmulh_vv_tumu:
  case RISCVVector::BI__builtin_rvv_vmulh_vx_tumu:
  case RISCVVector::BI__builtin_rvv_vsmul_vv:
  case RISCVVector::BI__builtin_rvv_vsmul_vx:
  case RISCVVector::BI__builtin_rvv_vsmul_vv_tu:
  case RISCVVector::BI__builtin_rvv_vsmul_vx_tu:
  case RISCVVector::BI__builtin_rvv_vsmul_vv_m:
  case RISCVVector::BI__builtin_rvv_vsmul_vx_m:
  case RISCVVector::BI__builtin_rvv_vsmul_vv_mu:
  case RISCVVector::BI__builtin_rvv_vsmul_vx_mu:
  case RISCVVector::BI__builtin_rvv_vsmul_vv_tum:
  case RISCVVector::BI__builtin_rvv_vsmul_vx_tum:
  case RISCVVector::BI__builtin_rvv_vsmul_vv_tumu:
  case RISCVVector::BI__builtin_rvv_vsmul_vx_tumu: {
    ASTContext::BuiltinVectorTypeInfo Info = Context.getBuiltinVectorTypeInfo(
        TheCall->getType()->castAs<BuiltinType>());

    const FunctionDecl *FD = SemaRef.getCurFunctionDecl();
    llvm::StringMap<bool> FunctionFeatureMap;
    Context.getFunctionFeatureMap(FunctionFeatureMap, FD);

    if (Context.getTypeSize(Info.ElementType) == 64 && !TI.hasFeature("v") &&
        !FunctionFeatureMap.lookup("v"))
      return Diag(TheCall->getBeginLoc(),
                  diag::err_riscv_builtin_requires_extension)
             << /* IsExtension */ true << TheCall->getSourceRange() << "v";

    break;
  }
  }

  switch (BuiltinID) {
  case RISCVVector::BI__builtin_rvv_vsetvli:
    return SemaRef.BuiltinConstantArgRange(TheCall, 1, 0, 3) ||
           CheckLMUL(TheCall, 2);
  case RISCVVector::BI__builtin_rvv_vsetvlimax:
    return SemaRef.BuiltinConstantArgRange(TheCall, 0, 0, 3) ||
           CheckLMUL(TheCall, 1);
  case RISCVVector::BI__builtin_rvv_vget_v: {
    ASTContext::BuiltinVectorTypeInfo ResVecInfo =
        Context.getBuiltinVectorTypeInfo(cast<BuiltinType>(
            TheCall->getType().getCanonicalType().getTypePtr()));
    ASTContext::BuiltinVectorTypeInfo VecInfo =
        Context.getBuiltinVectorTypeInfo(cast<BuiltinType>(
            TheCall->getArg(0)->getType().getCanonicalType().getTypePtr()));
    unsigned MaxIndex;
    if (VecInfo.NumVectors != 1) // vget for tuple type
      MaxIndex = VecInfo.NumVectors;
    else // vget for non-tuple type
      MaxIndex = (VecInfo.EC.getKnownMinValue() * VecInfo.NumVectors) /
                 (ResVecInfo.EC.getKnownMinValue() * ResVecInfo.NumVectors);
    return SemaRef.BuiltinConstantArgRange(TheCall, 1, 0, MaxIndex - 1);
  }
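  // For the vget_v check above (illustrative): extracting a vint32m1_t from a
  // vint32m4_t via __riscv_vget_v_i32m4_i32m1, the source has 8 known-minimum
  // elements and the result has 2, so MaxIndex = 4 and the index argument
  // must be in [0, 3].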
  case RISCVVector::BI__builtin_rvv_vset_v: {
    ASTContext::BuiltinVectorTypeInfo ResVecInfo =
        Context.getBuiltinVectorTypeInfo(cast<BuiltinType>(
            TheCall->getType().getCanonicalType().getTypePtr()));
    ASTContext::BuiltinVectorTypeInfo VecInfo =
        Context.getBuiltinVectorTypeInfo(cast<BuiltinType>(
            TheCall->getArg(2)->getType().getCanonicalType().getTypePtr()));
    unsigned MaxIndex;
    if (ResVecInfo.NumVectors != 1) // vset for tuple type
      MaxIndex = ResVecInfo.NumVectors;
    else // vset for non-tuple type
      MaxIndex = (ResVecInfo.EC.getKnownMinValue() * ResVecInfo.NumVectors) /
                 (VecInfo.EC.getKnownMinValue() * VecInfo.NumVectors);
    return SemaRef.BuiltinConstantArgRange(TheCall, 1, 0, MaxIndex - 1);
  }
  // Vector Crypto
  case RISCVVector::BI__builtin_rvv_vaeskf1_vi_tu:
  case RISCVVector::BI__builtin_rvv_vaeskf2_vi_tu:
  case RISCVVector::BI__builtin_rvv_vaeskf2_vi:
  case RISCVVector::BI__builtin_rvv_vsm4k_vi_tu: {
    QualType Op1Type = TheCall->getArg(0)->getType();
    QualType Op2Type = TheCall->getArg(1)->getType();
    return CheckInvalidVLENandLMUL(TI, TheCall, SemaRef, Op1Type, 128) ||
           CheckInvalidVLENandLMUL(TI, TheCall, SemaRef, Op2Type, 128) ||
           SemaRef.BuiltinConstantArgRange(TheCall, 2, 0, 31);
  }
  case RISCVVector::BI__builtin_rvv_vsm3c_vi_tu:
  case RISCVVector::BI__builtin_rvv_vsm3c_vi: {
    QualType Op1Type = TheCall->getArg(0)->getType();
    return CheckInvalidVLENandLMUL(TI, TheCall, SemaRef, Op1Type, 256) ||
           SemaRef.BuiltinConstantArgRange(TheCall, 2, 0, 31);
  }
  case RISCVVector::BI__builtin_rvv_vaeskf1_vi:
  case RISCVVector::BI__builtin_rvv_vsm4k_vi: {
    QualType Op1Type = TheCall->getArg(0)->getType();
    return CheckInvalidVLENandLMUL(TI, TheCall, SemaRef, Op1Type, 128) ||
           SemaRef.BuiltinConstantArgRange(TheCall, 1, 0, 31);
  }
  case RISCVVector::BI__builtin_rvv_vaesdf_vv:
  case RISCVVector::BI__builtin_rvv_vaesdf_vs:
  case RISCVVector::BI__builtin_rvv_vaesdm_vv:
  case RISCVVector::BI__builtin_rvv_vaesdm_vs:
  case RISCVVector::BI__builtin_rvv_vaesef_vv:
  case RISCVVector::BI__builtin_rvv_vaesef_vs:
  case RISCVVector::BI__builtin_rvv_vaesem_vv:
  case RISCVVector::BI__builtin_rvv_vaesem_vs:
  case RISCVVector::BI__builtin_rvv_vaesz_vs:
  case RISCVVector::BI__builtin_rvv_vsm4r_vv:
  case RISCVVector::BI__builtin_rvv_vsm4r_vs:
  case RISCVVector::BI__builtin_rvv_vaesdf_vv_tu:
  case RISCVVector::BI__builtin_rvv_vaesdf_vs_tu:
  case RISCVVector::BI__builtin_rvv_vaesdm_vv_tu:
  case RISCVVector::BI__builtin_rvv_vaesdm_vs_tu:
  case RISCVVector::BI__builtin_rvv_vaesef_vv_tu:
  case RISCVVector::BI__builtin_rvv_vaesef_vs_tu:
  case RISCVVector::BI__builtin_rvv_vaesem_vv_tu:
  case RISCVVector::BI__builtin_rvv_vaesem_vs_tu:
  case RISCVVector::BI__builtin_rvv_vaesz_vs_tu:
  case RISCVVector::BI__builtin_rvv_vsm4r_vv_tu:
  case RISCVVector::BI__builtin_rvv_vsm4r_vs_tu: {
    QualType Op1Type = TheCall->getArg(0)->