[Hexagon] Make conversions to vector predicate types explicit for builtins

HVX does not have load/store instructions for vector predicates (i.e. bool
vectors). Because of that, vector predicates need to be converted to another
type before being stored, and the most convenient representation is an HVX
vector.
As a consequence, in C/C++, source-level builtins that either take or
produce vector predicates take or return regular vectors instead. On the
other hand, the corresponding LLVM intrinsics do use the boolean vector
types, and so a conversion of the operand or the return value was
necessary. This conversion happened inside clang's codegen, but it was
somewhat fragile.

This patch changes the strategy: a builtin that takes a vector predicate
now really expects a vector predicate. Since such a predicate cannot be
provided via a variable, this builtin must be composed with other builtins
that either convert a vector to a predicate (V6_vandvrt) or a predicate to
a vector (V6_vandqrt).

For users of the builtins defined in hvx_hexagon_protos.h there is no impact:
the conversions were added to that file. Other users will need to insert
- __builtin_HEXAGON_V6_vandvrt[_128B](V, -1) to convert a vector V to a
  vector predicate, or
- __builtin_HEXAGON_V6_vandqrt[_128B](Q, -1) to convert a vector predicate Q
  to a vector,
as sketched below.
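
A minimal sketch of the new composition style (function names here are
hypothetical; assumes the HVX_Vector typedef from hexagon_types.h and a
128-byte HVX build):

  #include <hexagon_types.h>

  // A vector predicate cannot be held in a plain variable, so the
  // conversion builtins are composed by nesting the calls.
  HVX_Vector masked_add(HVX_Vector mask, HVX_Vector a, HVX_Vector b) {
    // Vector -> vector predicate, feeding the predicated add directly.
    return __builtin_HEXAGON_V6_vaddwq_128B(
        __builtin_HEXAGON_V6_vandvrt_128B(mask, -1), a, b);
  }

  // Vector predicate -> vector, e.g. to materialize a comparison result.
  HVX_Vector equal_mask(HVX_Vector a, HVX_Vector b) {
    return __builtin_HEXAGON_V6_vandqrt_128B(
        __builtin_HEXAGON_V6_veqw_128B(a, b), -1);
  }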

Builtins __builtin_HEXAGON_V6_vmaskedstore.* are a temporary exception to
this rule, but they are deprecated and should not be used anyway. In the
future they will either follow the same rule or be removed.
Krzysztof Parzyszek 2021-12-22 11:29:36 -08:00
parent 86618e37bd
commit dcb3e8083a
8 changed files with 849 additions and 1083 deletions

@@ -8,199 +8,7 @@
// Automatically generated file, do not edit!
//===----------------------------------------------------------------------===//
CUSTOM_BUILTIN_MAPPING(A2_add, 0)
CUSTOM_BUILTIN_MAPPING(A2_addi, 0)
CUSTOM_BUILTIN_MAPPING(A2_addp, 0)
CUSTOM_BUILTIN_MAPPING(A2_and, 0)
CUSTOM_BUILTIN_MAPPING(A2_andir, 0)
CUSTOM_BUILTIN_MAPPING(A2_neg, 0)
CUSTOM_BUILTIN_MAPPING(A2_not, 0)
CUSTOM_BUILTIN_MAPPING(A2_or, 0)
CUSTOM_BUILTIN_MAPPING(A2_orir, 0)
CUSTOM_BUILTIN_MAPPING(A2_sub, 0)
CUSTOM_BUILTIN_MAPPING(A2_subp, 0)
CUSTOM_BUILTIN_MAPPING(A2_subri, 0)
CUSTOM_BUILTIN_MAPPING(A2_sxtb, 0)
CUSTOM_BUILTIN_MAPPING(A2_sxth, 0)
CUSTOM_BUILTIN_MAPPING(A2_xor, 0)
CUSTOM_BUILTIN_MAPPING(A2_zxtb, 0)
CUSTOM_BUILTIN_MAPPING(A2_zxth, 0)
CUSTOM_BUILTIN_MAPPING(M2_dpmpyss_s0, 0)
CUSTOM_BUILTIN_MAPPING(M2_dpmpyuu_s0, 0)
CUSTOM_BUILTIN_MAPPING(M2_mpyi, 0)
CUSTOM_BUILTIN_MAPPING(M2_mpysmi, 0)
CUSTOM_BUILTIN_MAPPING(M2_mpyui, 0)
CUSTOM_BUILTIN_MAPPING(S2_asl_i_p, 0)
CUSTOM_BUILTIN_MAPPING(S2_asl_i_r, 0)
CUSTOM_BUILTIN_MAPPING(S2_asr_i_p, 0)
CUSTOM_BUILTIN_MAPPING(S2_asr_i_r, 0)
CUSTOM_BUILTIN_MAPPING(S2_lsr_i_p, 0)
CUSTOM_BUILTIN_MAPPING(S2_lsr_i_r, 0)
CUSTOM_BUILTIN_MAPPING(V6_pred_and, 64)
CUSTOM_BUILTIN_MAPPING(V6_pred_and_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_pred_and_n, 64)
CUSTOM_BUILTIN_MAPPING(V6_pred_and_n_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_pred_not, 64)
CUSTOM_BUILTIN_MAPPING(V6_pred_not_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_pred_or, 64)
CUSTOM_BUILTIN_MAPPING(V6_pred_or_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_pred_or_n, 64)
CUSTOM_BUILTIN_MAPPING(V6_pred_or_n_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_pred_scalar2, 64)
CUSTOM_BUILTIN_MAPPING(V6_pred_scalar2_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_pred_xor, 64)
CUSTOM_BUILTIN_MAPPING(V6_pred_xor_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vS32b_nqpred_ai, 64)
CUSTOM_BUILTIN_MAPPING(V6_vS32b_nqpred_ai_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vS32b_nt_nqpred_ai, 64)
CUSTOM_BUILTIN_MAPPING(V6_vS32b_nt_nqpred_ai_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vS32b_nt_qpred_ai, 64)
CUSTOM_BUILTIN_MAPPING(V6_vS32b_nt_qpred_ai_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vS32b_qpred_ai, 64)
CUSTOM_BUILTIN_MAPPING(V6_vS32b_qpred_ai_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vaddbnq, 64)
CUSTOM_BUILTIN_MAPPING(V6_vaddbnq_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vaddbq, 64)
CUSTOM_BUILTIN_MAPPING(V6_vaddbq_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vaddhnq, 64)
CUSTOM_BUILTIN_MAPPING(V6_vaddhnq_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vaddhq, 64)
CUSTOM_BUILTIN_MAPPING(V6_vaddhq_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vaddwnq, 64)
CUSTOM_BUILTIN_MAPPING(V6_vaddwnq_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vaddwq, 64)
CUSTOM_BUILTIN_MAPPING(V6_vaddwq_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vandqrt, 64)
CUSTOM_BUILTIN_MAPPING(V6_vandqrt_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vandqrt_acc, 64)
CUSTOM_BUILTIN_MAPPING(V6_vandqrt_acc_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vandvrt, 64)
CUSTOM_BUILTIN_MAPPING(V6_vandvrt_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vandvrt_acc, 64)
CUSTOM_BUILTIN_MAPPING(V6_vandvrt_acc_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_veqb, 64)
CUSTOM_BUILTIN_MAPPING(V6_veqb_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_veqb_and, 64)
CUSTOM_BUILTIN_MAPPING(V6_veqb_and_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_veqb_or, 64)
CUSTOM_BUILTIN_MAPPING(V6_veqb_or_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_veqb_xor, 64)
CUSTOM_BUILTIN_MAPPING(V6_veqb_xor_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_veqh, 64)
CUSTOM_BUILTIN_MAPPING(V6_veqh_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_veqh_and, 64)
CUSTOM_BUILTIN_MAPPING(V6_veqh_and_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_veqh_or, 64)
CUSTOM_BUILTIN_MAPPING(V6_veqh_or_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_veqh_xor, 64)
CUSTOM_BUILTIN_MAPPING(V6_veqh_xor_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_veqw, 64)
CUSTOM_BUILTIN_MAPPING(V6_veqw_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_veqw_and, 64)
CUSTOM_BUILTIN_MAPPING(V6_veqw_and_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_veqw_or, 64)
CUSTOM_BUILTIN_MAPPING(V6_veqw_or_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_veqw_xor, 64)
CUSTOM_BUILTIN_MAPPING(V6_veqw_xor_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vgtb, 64)
CUSTOM_BUILTIN_MAPPING(V6_vgtb_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vgtb_and, 64)
CUSTOM_BUILTIN_MAPPING(V6_vgtb_and_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vgtb_or, 64)
CUSTOM_BUILTIN_MAPPING(V6_vgtb_or_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vgtb_xor, 64)
CUSTOM_BUILTIN_MAPPING(V6_vgtb_xor_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vgth, 64)
CUSTOM_BUILTIN_MAPPING(V6_vgth_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vgth_and, 64)
CUSTOM_BUILTIN_MAPPING(V6_vgth_and_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vgth_or, 64)
CUSTOM_BUILTIN_MAPPING(V6_vgth_or_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vgth_xor, 64)
CUSTOM_BUILTIN_MAPPING(V6_vgth_xor_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vgtub, 64)
CUSTOM_BUILTIN_MAPPING(V6_vgtub_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vgtub_and, 64)
CUSTOM_BUILTIN_MAPPING(V6_vgtub_and_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vgtub_or, 64)
CUSTOM_BUILTIN_MAPPING(V6_vgtub_or_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vgtub_xor, 64)
CUSTOM_BUILTIN_MAPPING(V6_vgtub_xor_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vgtuh, 64)
CUSTOM_BUILTIN_MAPPING(V6_vgtuh_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vgtuh_and, 64)
CUSTOM_BUILTIN_MAPPING(V6_vgtuh_and_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vgtuh_or, 64)
CUSTOM_BUILTIN_MAPPING(V6_vgtuh_or_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vgtuh_xor, 64)
CUSTOM_BUILTIN_MAPPING(V6_vgtuh_xor_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vgtuw, 64)
CUSTOM_BUILTIN_MAPPING(V6_vgtuw_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vgtuw_and, 64)
CUSTOM_BUILTIN_MAPPING(V6_vgtuw_and_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vgtuw_or, 64)
CUSTOM_BUILTIN_MAPPING(V6_vgtuw_or_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vgtuw_xor, 64)
CUSTOM_BUILTIN_MAPPING(V6_vgtuw_xor_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vgtw, 64)
CUSTOM_BUILTIN_MAPPING(V6_vgtw_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vgtw_and, 64)
CUSTOM_BUILTIN_MAPPING(V6_vgtw_and_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vgtw_or, 64)
CUSTOM_BUILTIN_MAPPING(V6_vgtw_or_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vgtw_xor, 64)
CUSTOM_BUILTIN_MAPPING(V6_vgtw_xor_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vmux, 64)
CUSTOM_BUILTIN_MAPPING(V6_vmux_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vsubbnq, 64)
CUSTOM_BUILTIN_MAPPING(V6_vsubbnq_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vsubbq, 64)
CUSTOM_BUILTIN_MAPPING(V6_vsubbq_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vsubhnq, 64)
CUSTOM_BUILTIN_MAPPING(V6_vsubhnq_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vsubhq, 64)
CUSTOM_BUILTIN_MAPPING(V6_vsubhq_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vsubwnq, 64)
CUSTOM_BUILTIN_MAPPING(V6_vsubwnq_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vsubwq, 64)
CUSTOM_BUILTIN_MAPPING(V6_vsubwq_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vswap, 64)
CUSTOM_BUILTIN_MAPPING(V6_vswap_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_pred_scalar2v2, 64)
CUSTOM_BUILTIN_MAPPING(V6_pred_scalar2v2_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_shuffeqh, 64)
CUSTOM_BUILTIN_MAPPING(V6_shuffeqh_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_shuffeqw, 64)
CUSTOM_BUILTIN_MAPPING(V6_shuffeqw_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vaddcarry, 64)
CUSTOM_BUILTIN_MAPPING(V6_vaddcarry_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vandnqrt, 64)
CUSTOM_BUILTIN_MAPPING(V6_vandnqrt_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vandnqrt_acc, 64)
CUSTOM_BUILTIN_MAPPING(V6_vandnqrt_acc_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vandvnqv, 64)
CUSTOM_BUILTIN_MAPPING(V6_vandvnqv_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vandvqv, 64)
CUSTOM_BUILTIN_MAPPING(V6_vandvqv_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vsubcarry, 64)
CUSTOM_BUILTIN_MAPPING(V6_vsubcarry_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vgathermhq, 64)
CUSTOM_BUILTIN_MAPPING(V6_vgathermhq_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vgathermhwq, 64)
CUSTOM_BUILTIN_MAPPING(V6_vgathermhwq_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vgathermwq, 64)
CUSTOM_BUILTIN_MAPPING(V6_vgathermwq_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vprefixqb, 64)
CUSTOM_BUILTIN_MAPPING(V6_vprefixqb_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vprefixqh, 64)
CUSTOM_BUILTIN_MAPPING(V6_vprefixqh_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vprefixqw, 64)
CUSTOM_BUILTIN_MAPPING(V6_vprefixqw_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vscattermhq, 64)
CUSTOM_BUILTIN_MAPPING(V6_vscattermhq_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vscattermhwq, 64)
CUSTOM_BUILTIN_MAPPING(V6_vscattermhwq_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vscattermwq, 64)
CUSTOM_BUILTIN_MAPPING(V6_vscattermwq_128B, 128)
CUSTOM_BUILTIN_MAPPING(V6_vaddcarrysat, 64)
CUSTOM_BUILTIN_MAPPING(V6_vaddcarrysat_128B, 128)

@@ -18596,6 +18596,7 @@ getIntrinsicForHexagonNonGCCBuiltin(unsigned BuiltinID) {
CUSTOM_BUILTIN_MAPPING(S2_storerf_pcr, 0)
CUSTOM_BUILTIN_MAPPING(S2_storeri_pcr, 0)
CUSTOM_BUILTIN_MAPPING(S2_storerd_pcr, 0)
// Legacy builtins that take a vector in place of a vector predicate.
CUSTOM_BUILTIN_MAPPING(V6_vmaskedstoreq, 64)
CUSTOM_BUILTIN_MAPPING(V6_vmaskedstorenq, 64)
CUSTOM_BUILTIN_MAPPING(V6_vmaskedstorentq, 64)
@@ -18733,6 +18734,27 @@ Value *CodeGenFunction::EmitHexagonBuiltinExpr(unsigned BuiltinID,
    return Builder.CreateExtractValue(Result, 0);
  }
  case Hexagon::BI__builtin_HEXAGON_V6_vmaskedstoreq:
  case Hexagon::BI__builtin_HEXAGON_V6_vmaskedstorenq:
  case Hexagon::BI__builtin_HEXAGON_V6_vmaskedstorentq:
  case Hexagon::BI__builtin_HEXAGON_V6_vmaskedstorentnq:
  case Hexagon::BI__builtin_HEXAGON_V6_vmaskedstoreq_128B:
  case Hexagon::BI__builtin_HEXAGON_V6_vmaskedstorenq_128B:
  case Hexagon::BI__builtin_HEXAGON_V6_vmaskedstorentq_128B:
  case Hexagon::BI__builtin_HEXAGON_V6_vmaskedstorentnq_128B: {
    SmallVector<llvm::Value*,4> Ops;
    const Expr *PredOp = E->getArg(0);
    // There will be an implicit cast to a boolean vector. Strip it.
    if (auto *Cast = dyn_cast<ImplicitCastExpr>(PredOp)) {
      if (Cast->getCastKind() == CK_BitCast)
        PredOp = Cast->getSubExpr();
      Ops.push_back(V2Q(EmitScalarExpr(PredOp)));
    }
    for (int i = 1, e = E->getNumArgs(); i != e; ++i)
      Ops.push_back(EmitScalarExpr(E->getArg(i)));
    return Builder.CreateCall(CGM.getIntrinsic(ID), Ops);
  }
  case Hexagon::BI__builtin_HEXAGON_L2_loadrub_pci:
  case Hexagon::BI__builtin_HEXAGON_L2_loadrb_pci:
  case Hexagon::BI__builtin_HEXAGON_L2_loadruh_pci:
@@ -18769,40 +18791,6 @@ Value *CodeGenFunction::EmitHexagonBuiltinExpr(unsigned BuiltinID,
    return MakeBrevLd(Intrinsic::hexagon_L2_loadri_pbr, Int32Ty);
  case Hexagon::BI__builtin_brev_ldd:
    return MakeBrevLd(Intrinsic::hexagon_L2_loadrd_pbr, Int64Ty);
  default: {
    if (ID == Intrinsic::not_intrinsic)
      return nullptr;
    auto IsVectorPredTy = [](llvm::Type *T) {
      return T->isVectorTy() &&
             cast<llvm::VectorType>(T)->getElementType()->isIntegerTy(1);
    };
    llvm::Function *IntrFn = CGM.getIntrinsic(ID);
    llvm::FunctionType *IntrTy = IntrFn->getFunctionType();
    SmallVector<llvm::Value*,4> Ops;
    for (unsigned i = 0, e = IntrTy->getNumParams(); i != e; ++i) {
      llvm::Type *T = IntrTy->getParamType(i);
      const Expr *A = E->getArg(i);
      if (IsVectorPredTy(T)) {
        // There will be an implicit cast to a boolean vector. Strip it.
        if (auto *Cast = dyn_cast<ImplicitCastExpr>(A)) {
          if (Cast->getCastKind() == CK_BitCast)
            A = Cast->getSubExpr();
        }
        Ops.push_back(V2Q(EmitScalarExpr(A)));
      } else {
        Ops.push_back(EmitScalarExpr(A));
      }
    }
    llvm::Value *Call = Builder.CreateCall(IntrFn, Ops);
    if (IsVectorPredTy(IntrTy->getReturnType()))
      Call = Q2V(Call);
    return Call;
  } // default
  } // switch
  return nullptr;
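
For context, the V2Q and Q2V helpers referenced in the hunks above convert
between an HVX vector value and a vector predicate at the IR level,
mirroring the source-level vandvrt/vandqrt conversions. A sketch of their
likely shape (the helpers themselves are outside this diff; 64-byte mode
assumed, the real helpers would also handle the _128B variants):

  // Sketch only: vector -> predicate via @llvm.hexagon.V6.vandvrt(V, -1).
  auto V2Q = [&](llvm::Value *Vec) {
    llvm::Function *Fn = CGM.getIntrinsic(Intrinsic::hexagon_V6_vandvrt);
    return Builder.CreateCall(Fn, {Vec, Builder.getInt32(-1)});
  };
  // Sketch only: predicate -> vector via @llvm.hexagon.V6.vandqrt(Q, -1).
  auto Q2V = [&](llvm::Value *Pred) {
    llvm::Function *Fn = CGM.getIntrinsic(Intrinsic::hexagon_V6_vandqrt);
    return Builder.CreateCall(Fn, {Pred, Builder.getInt32(-1)});
  };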

File diff suppressed because it is too large.

@@ -20,7 +20,7 @@ HEXAGON_Vect1024 test1(void *in, void *out) {
v2 = *p++;
q1 = *p++;
return __builtin_HEXAGON_V6_vaddcarrysat_128B(v1, v2, q1);
return __builtin_HEXAGON_V6_vaddcarrysat_128B(v1, v2, __builtin_HEXAGON_V6_vandvrt_128B(q1, -1));
}
// CHECK-LABEL: @test26

@@ -44,7 +44,7 @@ HEXAGON_Vect512 test5(void *in, void *out) {
v2 = *p++;
q1 = *p++;
return __builtin_HEXAGON_V6_vaddcarrysat(v1, v2, q1);
return __builtin_HEXAGON_V6_vaddcarrysat(v1, v2, __builtin_HEXAGON_V6_vandvrt(q1, -1));
}
// CHECK-LABEL: @test6

@@ -6,6 +6,17 @@ void test() {
int v128 __attribute__((__vector_size__(128)));
int v256 __attribute__((__vector_size__(256)));
// These are special and ugly: they take an HVX vector in place of
// the HVX vector predicate.
// CHECK: @llvm.hexagon.V6.vmaskedstorenq.128B
__builtin_HEXAGON_V6_vmaskedstorenq_128B(q128, 0, v128);
// CHECK: @llvm.hexagon.V6.vmaskedstorentnq.128B
__builtin_HEXAGON_V6_vmaskedstorentnq_128B(q128, 0, v128);
// CHECK: @llvm.hexagon.V6.vmaskedstorentq.128B
__builtin_HEXAGON_V6_vmaskedstorentq_128B(q128, 0, v128);
// CHECK: @llvm.hexagon.V6.vmaskedstoreq.128B
__builtin_HEXAGON_V6_vmaskedstoreq_128B(q128, 0, v128);
// CHECK: @llvm.hexagon.V6.extractw.128B
__builtin_HEXAGON_V6_extractw_128B(v128, 0);
// CHECK: @llvm.hexagon.V6.hi.128B
@@ -19,33 +30,33 @@ void test() {
// CHECK: @llvm.hexagon.V6.lvsplatw.128B
__builtin_HEXAGON_V6_lvsplatw_128B(0);
// CHECK: @llvm.hexagon.V6.pred.and.128B
__builtin_HEXAGON_V6_pred_and_128B(q128, q128);
__builtin_HEXAGON_V6_pred_and_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), __builtin_HEXAGON_V6_vandvrt_128B(q128, -1));
// CHECK: @llvm.hexagon.V6.pred.and.n.128B
__builtin_HEXAGON_V6_pred_and_n_128B(q128, q128);
__builtin_HEXAGON_V6_pred_and_n_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), __builtin_HEXAGON_V6_vandvrt_128B(q128, -1));
// CHECK: @llvm.hexagon.V6.pred.not.128B
__builtin_HEXAGON_V6_pred_not_128B(q128);
__builtin_HEXAGON_V6_pred_not_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1));
// CHECK: @llvm.hexagon.V6.pred.or.128B
__builtin_HEXAGON_V6_pred_or_128B(q128, q128);
__builtin_HEXAGON_V6_pred_or_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), __builtin_HEXAGON_V6_vandvrt_128B(q128, -1));
// CHECK: @llvm.hexagon.V6.pred.or.n.128B
__builtin_HEXAGON_V6_pred_or_n_128B(q128, q128);
__builtin_HEXAGON_V6_pred_or_n_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), __builtin_HEXAGON_V6_vandvrt_128B(q128, -1));
// CHECK: @llvm.hexagon.V6.pred.scalar2.128B
__builtin_HEXAGON_V6_pred_scalar2_128B(0);
// CHECK: @llvm.hexagon.V6.pred.scalar2v2.128B
__builtin_HEXAGON_V6_pred_scalar2v2_128B(0);
// CHECK: @llvm.hexagon.V6.pred.xor.128B
__builtin_HEXAGON_V6_pred_xor_128B(q128, q128);
__builtin_HEXAGON_V6_pred_xor_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), __builtin_HEXAGON_V6_vandvrt_128B(q128, -1));
// CHECK: @llvm.hexagon.V6.shuffeqh.128B
__builtin_HEXAGON_V6_shuffeqh_128B(q128, q128);
__builtin_HEXAGON_V6_shuffeqh_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), __builtin_HEXAGON_V6_vandvrt_128B(q128, -1));
// CHECK: @llvm.hexagon.V6.shuffeqw.128B
__builtin_HEXAGON_V6_shuffeqw_128B(q128, q128);
__builtin_HEXAGON_V6_shuffeqw_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), __builtin_HEXAGON_V6_vandvrt_128B(q128, -1));
// CHECK: @llvm.hexagon.V6.vS32b.nqpred.ai.128B
__builtin_HEXAGON_V6_vS32b_nqpred_ai_128B(q128, 0, v128);
__builtin_HEXAGON_V6_vS32b_nqpred_ai_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), 0, v128);
// CHECK: @llvm.hexagon.V6.vS32b.nt.nqpred.ai.128B
__builtin_HEXAGON_V6_vS32b_nt_nqpred_ai_128B(q128, 0, v128);
__builtin_HEXAGON_V6_vS32b_nt_nqpred_ai_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), 0, v128);
// CHECK: @llvm.hexagon.V6.vS32b.nt.qpred.ai.128B
__builtin_HEXAGON_V6_vS32b_nt_qpred_ai_128B(q128, 0, v128);
__builtin_HEXAGON_V6_vS32b_nt_qpred_ai_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), 0, v128);
// CHECK: @llvm.hexagon.V6.vS32b.qpred.ai.128B
__builtin_HEXAGON_V6_vS32b_qpred_ai_128B(q128, 0, v128);
__builtin_HEXAGON_V6_vS32b_qpred_ai_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), 0, v128);
// CHECK: @llvm.hexagon.V6.vabsb.128B
__builtin_HEXAGON_V6_vabsb_128B(v128);
// CHECK: @llvm.hexagon.V6.vabsb.sat.128B
@@ -71,9 +82,9 @@ void test() {
// CHECK: @llvm.hexagon.V6.vaddb.dv.128B
__builtin_HEXAGON_V6_vaddb_dv_128B(v256, v256);
// CHECK: @llvm.hexagon.V6.vaddbnq.128B
__builtin_HEXAGON_V6_vaddbnq_128B(q128, v128, v128);
__builtin_HEXAGON_V6_vaddbnq_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.vaddbq.128B
__builtin_HEXAGON_V6_vaddbq_128B(q128, v128, v128);
__builtin_HEXAGON_V6_vaddbq_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.vaddbsat.128B
__builtin_HEXAGON_V6_vaddbsat_128B(v128, v128);
// CHECK: @llvm.hexagon.V6.vaddbsat.dv.128B
@@ -89,9 +100,9 @@ void test() {
// CHECK: @llvm.hexagon.V6.vaddh.dv.128B
__builtin_HEXAGON_V6_vaddh_dv_128B(v256, v256);
// CHECK: @llvm.hexagon.V6.vaddhnq.128B
__builtin_HEXAGON_V6_vaddhnq_128B(q128, v128, v128);
__builtin_HEXAGON_V6_vaddhnq_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.vaddhq.128B
__builtin_HEXAGON_V6_vaddhq_128B(q128, v128, v128);
__builtin_HEXAGON_V6_vaddhq_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.vaddhsat.128B
__builtin_HEXAGON_V6_vaddhsat_128B(v128, v128);
// CHECK: @llvm.hexagon.V6.vaddhsat.dv.128B
@@ -127,9 +138,9 @@ void test() {
// CHECK: @llvm.hexagon.V6.vaddw.dv.128B
__builtin_HEXAGON_V6_vaddw_dv_128B(v256, v256);
// CHECK: @llvm.hexagon.V6.vaddwnq.128B
__builtin_HEXAGON_V6_vaddwnq_128B(q128, v128, v128);
__builtin_HEXAGON_V6_vaddwnq_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.vaddwq.128B
__builtin_HEXAGON_V6_vaddwq_128B(q128, v128, v128);
__builtin_HEXAGON_V6_vaddwq_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.vaddwsat.128B
__builtin_HEXAGON_V6_vaddwsat_128B(v128, v128);
// CHECK: @llvm.hexagon.V6.vaddwsat.dv.128B
@@ -141,21 +152,21 @@ void test() {
// CHECK: @llvm.hexagon.V6.vand.128B
__builtin_HEXAGON_V6_vand_128B(v128, v128);
// CHECK: @llvm.hexagon.V6.vandnqrt.128B
__builtin_HEXAGON_V6_vandnqrt_128B(q128, 0);
__builtin_HEXAGON_V6_vandnqrt_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), 0);
// CHECK: @llvm.hexagon.V6.vandnqrt.acc.128B
__builtin_HEXAGON_V6_vandnqrt_acc_128B(v128, q128, 0);
__builtin_HEXAGON_V6_vandnqrt_acc_128B(v128, __builtin_HEXAGON_V6_vandvrt_128B(q128, -1), 0);
// CHECK: @llvm.hexagon.V6.vandqrt.128B
__builtin_HEXAGON_V6_vandqrt_128B(q128, 0);
__builtin_HEXAGON_V6_vandqrt_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), 0);
// CHECK: @llvm.hexagon.V6.vandqrt.acc.128B
__builtin_HEXAGON_V6_vandqrt_acc_128B(v128, q128, 0);
__builtin_HEXAGON_V6_vandqrt_acc_128B(v128, __builtin_HEXAGON_V6_vandvrt_128B(q128, -1), 0);
// CHECK: @llvm.hexagon.V6.vandvnqv.128B
__builtin_HEXAGON_V6_vandvnqv_128B(q128, v128);
__builtin_HEXAGON_V6_vandvnqv_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128);
// CHECK: @llvm.hexagon.V6.vandvqv.128B
__builtin_HEXAGON_V6_vandvqv_128B(q128, v128);
__builtin_HEXAGON_V6_vandvqv_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128);
// CHECK: @llvm.hexagon.V6.vandvrt.128B
__builtin_HEXAGON_V6_vandvrt_128B(v128, 0);
// CHECK: @llvm.hexagon.V6.vandvrt.acc.128B
__builtin_HEXAGON_V6_vandvrt_acc_128B(q128, v128, 0);
__builtin_HEXAGON_V6_vandvrt_acc_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, 0);
// CHECK: @llvm.hexagon.V6.vaslh.128B
__builtin_HEXAGON_V6_vaslh_128B(v128, 0);
// CHECK: @llvm.hexagon.V6.vaslh.acc.128B
@@ -297,87 +308,87 @@ void test() {
// CHECK: @llvm.hexagon.V6.veqb.128B
__builtin_HEXAGON_V6_veqb_128B(v128, v128);
// CHECK: @llvm.hexagon.V6.veqb.and.128B
__builtin_HEXAGON_V6_veqb_and_128B(q128, v128, v128);
__builtin_HEXAGON_V6_veqb_and_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.veqb.or.128B
__builtin_HEXAGON_V6_veqb_or_128B(q128, v128, v128);
__builtin_HEXAGON_V6_veqb_or_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.veqb.xor.128B
__builtin_HEXAGON_V6_veqb_xor_128B(q128, v128, v128);
__builtin_HEXAGON_V6_veqb_xor_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.veqh.128B
__builtin_HEXAGON_V6_veqh_128B(v128, v128);
// CHECK: @llvm.hexagon.V6.veqh.and.128B
__builtin_HEXAGON_V6_veqh_and_128B(q128, v128, v128);
__builtin_HEXAGON_V6_veqh_and_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.veqh.or.128B
__builtin_HEXAGON_V6_veqh_or_128B(q128, v128, v128);
__builtin_HEXAGON_V6_veqh_or_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.veqh.xor.128B
__builtin_HEXAGON_V6_veqh_xor_128B(q128, v128, v128);
__builtin_HEXAGON_V6_veqh_xor_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.veqw.128B
__builtin_HEXAGON_V6_veqw_128B(v128, v128);
// CHECK: @llvm.hexagon.V6.veqw.and.128B
__builtin_HEXAGON_V6_veqw_and_128B(q128, v128, v128);
__builtin_HEXAGON_V6_veqw_and_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.veqw.or.128B
__builtin_HEXAGON_V6_veqw_or_128B(q128, v128, v128);
__builtin_HEXAGON_V6_veqw_or_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.veqw.xor.128B
__builtin_HEXAGON_V6_veqw_xor_128B(q128, v128, v128);
__builtin_HEXAGON_V6_veqw_xor_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.vgathermh.128B
__builtin_HEXAGON_V6_vgathermh_128B(0, 0, 0, v128);
// CHECK: @llvm.hexagon.V6.vgathermhq.128B
__builtin_HEXAGON_V6_vgathermhq_128B(0, q128, 0, 0, v128);
__builtin_HEXAGON_V6_vgathermhq_128B(0, __builtin_HEXAGON_V6_vandvrt_128B(q128, -1), 0, 0, v128);
// CHECK: @llvm.hexagon.V6.vgathermhw.128B
__builtin_HEXAGON_V6_vgathermhw_128B(0, 0, 0, v256);
// CHECK: @llvm.hexagon.V6.vgathermhwq.128B
__builtin_HEXAGON_V6_vgathermhwq_128B(0, q128, 0, 0, v256);
__builtin_HEXAGON_V6_vgathermhwq_128B(0, __builtin_HEXAGON_V6_vandvrt_128B(q128, -1), 0, 0, v256);
// CHECK: @llvm.hexagon.V6.vgathermw.128B
__builtin_HEXAGON_V6_vgathermw_128B(0, 0, 0, v128);
// CHECK: @llvm.hexagon.V6.vgathermwq.128B
__builtin_HEXAGON_V6_vgathermwq_128B(0, q128, 0, 0, v128);
__builtin_HEXAGON_V6_vgathermwq_128B(0, __builtin_HEXAGON_V6_vandvrt_128B(q128, -1), 0, 0, v128);
// CHECK: @llvm.hexagon.V6.vgtb.128B
__builtin_HEXAGON_V6_vgtb_128B(v128, v128);
// CHECK: @llvm.hexagon.V6.vgtb.and.128B
__builtin_HEXAGON_V6_vgtb_and_128B(q128, v128, v128);
__builtin_HEXAGON_V6_vgtb_and_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.vgtb.or.128B
__builtin_HEXAGON_V6_vgtb_or_128B(q128, v128, v128);
__builtin_HEXAGON_V6_vgtb_or_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.vgtb.xor.128B
__builtin_HEXAGON_V6_vgtb_xor_128B(q128, v128, v128);
__builtin_HEXAGON_V6_vgtb_xor_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.vgth.128B
__builtin_HEXAGON_V6_vgth_128B(v128, v128);
// CHECK: @llvm.hexagon.V6.vgth.and.128B
__builtin_HEXAGON_V6_vgth_and_128B(q128, v128, v128);
__builtin_HEXAGON_V6_vgth_and_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.vgth.or.128B
__builtin_HEXAGON_V6_vgth_or_128B(q128, v128, v128);
__builtin_HEXAGON_V6_vgth_or_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.vgth.xor.128B
__builtin_HEXAGON_V6_vgth_xor_128B(q128, v128, v128);
__builtin_HEXAGON_V6_vgth_xor_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.vgtub.128B
__builtin_HEXAGON_V6_vgtub_128B(v128, v128);
// CHECK: @llvm.hexagon.V6.vgtub.and.128B
__builtin_HEXAGON_V6_vgtub_and_128B(q128, v128, v128);
__builtin_HEXAGON_V6_vgtub_and_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.vgtub.or.128B
__builtin_HEXAGON_V6_vgtub_or_128B(q128, v128, v128);
__builtin_HEXAGON_V6_vgtub_or_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.vgtub.xor.128B
__builtin_HEXAGON_V6_vgtub_xor_128B(q128, v128, v128);
__builtin_HEXAGON_V6_vgtub_xor_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.vgtuh.128B
__builtin_HEXAGON_V6_vgtuh_128B(v128, v128);
// CHECK: @llvm.hexagon.V6.vgtuh.and.128B
__builtin_HEXAGON_V6_vgtuh_and_128B(q128, v128, v128);
__builtin_HEXAGON_V6_vgtuh_and_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.vgtuh.or.128B
__builtin_HEXAGON_V6_vgtuh_or_128B(q128, v128, v128);
__builtin_HEXAGON_V6_vgtuh_or_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.vgtuh.xor.128B
__builtin_HEXAGON_V6_vgtuh_xor_128B(q128, v128, v128);
__builtin_HEXAGON_V6_vgtuh_xor_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.vgtuw.128B
__builtin_HEXAGON_V6_vgtuw_128B(v128, v128);
// CHECK: @llvm.hexagon.V6.vgtuw.and.128B
__builtin_HEXAGON_V6_vgtuw_and_128B(q128, v128, v128);
__builtin_HEXAGON_V6_vgtuw_and_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.vgtuw.or.128B
__builtin_HEXAGON_V6_vgtuw_or_128B(q128, v128, v128);
__builtin_HEXAGON_V6_vgtuw_or_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.vgtuw.xor.128B
__builtin_HEXAGON_V6_vgtuw_xor_128B(q128, v128, v128);
__builtin_HEXAGON_V6_vgtuw_xor_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.vgtw.128B
__builtin_HEXAGON_V6_vgtw_128B(v128, v128);
// CHECK: @llvm.hexagon.V6.vgtw.and.128B
__builtin_HEXAGON_V6_vgtw_and_128B(q128, v128, v128);
__builtin_HEXAGON_V6_vgtw_and_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.vgtw.or.128B
__builtin_HEXAGON_V6_vgtw_or_128B(q128, v128, v128);
__builtin_HEXAGON_V6_vgtw_or_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.vgtw.xor.128B
__builtin_HEXAGON_V6_vgtw_xor_128B(q128, v128, v128);
__builtin_HEXAGON_V6_vgtw_xor_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.vinsertwr.128B
__builtin_HEXAGON_V6_vinsertwr_128B(v128, 0);
// CHECK: @llvm.hexagon.V6.vlalignb.128B
@@ -416,14 +427,6 @@ void test() {
__builtin_HEXAGON_V6_vlutvwh_oracci_128B(v256, v128, v128, 0);
// CHECK: @llvm.hexagon.V6.vlutvwhi.128B
__builtin_HEXAGON_V6_vlutvwhi_128B(v128, v128, 0);
// CHECK: @llvm.hexagon.V6.vmaskedstorenq.128B
__builtin_HEXAGON_V6_vmaskedstorenq_128B(q128, 0, v128);
// CHECK: @llvm.hexagon.V6.vmaskedstorentnq.128B
__builtin_HEXAGON_V6_vmaskedstorentnq_128B(q128, 0, v128);
// CHECK: @llvm.hexagon.V6.vmaskedstorentq.128B
__builtin_HEXAGON_V6_vmaskedstorentq_128B(q128, 0, v128);
// CHECK: @llvm.hexagon.V6.vmaskedstoreq.128B
__builtin_HEXAGON_V6_vmaskedstoreq_128B(q128, 0, v128);
// CHECK: @llvm.hexagon.V6.vmaxb.128B
__builtin_HEXAGON_V6_vmaxb_128B(v128, v128);
// CHECK: @llvm.hexagon.V6.vmaxh.128B
@@ -567,7 +570,7 @@ void test() {
// CHECK: @llvm.hexagon.V6.vmpyuhv.acc.128B
__builtin_HEXAGON_V6_vmpyuhv_acc_128B(v256, v128, v128);
// CHECK: @llvm.hexagon.V6.vmux.128B
__builtin_HEXAGON_V6_vmux_128B(q128, v128, v128);
__builtin_HEXAGON_V6_vmux_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.vnavgb.128B
__builtin_HEXAGON_V6_vnavgb_128B(v128, v128);
// CHECK: @llvm.hexagon.V6.vnavgh.128B
@@ -603,11 +606,11 @@ void test() {
// CHECK: @llvm.hexagon.V6.vpopcounth.128B
__builtin_HEXAGON_V6_vpopcounth_128B(v128);
// CHECK: @llvm.hexagon.V6.vprefixqb.128B
__builtin_HEXAGON_V6_vprefixqb_128B(q128);
__builtin_HEXAGON_V6_vprefixqb_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1));
// CHECK: @llvm.hexagon.V6.vprefixqh.128B
__builtin_HEXAGON_V6_vprefixqh_128B(q128);
__builtin_HEXAGON_V6_vprefixqh_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1));
// CHECK: @llvm.hexagon.V6.vprefixqw.128B
__builtin_HEXAGON_V6_vprefixqw_128B(q128);
__builtin_HEXAGON_V6_vprefixqw_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1));
// CHECK: @llvm.hexagon.V6.vrdelta.128B
__builtin_HEXAGON_V6_vrdelta_128B(v128, v128);
// CHECK: @llvm.hexagon.V6.vrmpybub.rtt.128B
@@ -677,19 +680,19 @@ void test() {
// CHECK: @llvm.hexagon.V6.vscattermh.add.128B
__builtin_HEXAGON_V6_vscattermh_add_128B(0, 0, v128, v128);
// CHECK: @llvm.hexagon.V6.vscattermhq.128B
__builtin_HEXAGON_V6_vscattermhq_128B(q128, 0, 0, v128, v128);
__builtin_HEXAGON_V6_vscattermhq_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), 0, 0, v128, v128);
// CHECK: @llvm.hexagon.V6.vscattermhw.128B
__builtin_HEXAGON_V6_vscattermhw_128B(0, 0, v256, v128);
// CHECK: @llvm.hexagon.V6.vscattermhw.add.128B
__builtin_HEXAGON_V6_vscattermhw_add_128B(0, 0, v256, v128);
// CHECK: @llvm.hexagon.V6.vscattermhwq.128B
__builtin_HEXAGON_V6_vscattermhwq_128B(q128, 0, 0, v256, v128);
__builtin_HEXAGON_V6_vscattermhwq_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), 0, 0, v256, v128);
// CHECK: @llvm.hexagon.V6.vscattermw.128B
__builtin_HEXAGON_V6_vscattermw_128B(0, 0, v128, v128);
// CHECK: @llvm.hexagon.V6.vscattermw.add.128B
__builtin_HEXAGON_V6_vscattermw_add_128B(0, 0, v128, v128);
// CHECK: @llvm.hexagon.V6.vscattermwq.128B
__builtin_HEXAGON_V6_vscattermwq_128B(q128, 0, 0, v128, v128);
__builtin_HEXAGON_V6_vscattermwq_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), 0, 0, v128, v128);
// CHECK: @llvm.hexagon.V6.vsh.128B
__builtin_HEXAGON_V6_vsh_128B(v128);
// CHECK: @llvm.hexagon.V6.vshufeh.128B
@@ -715,9 +718,9 @@ void test() {
// CHECK: @llvm.hexagon.V6.vsubb.dv.128B
__builtin_HEXAGON_V6_vsubb_dv_128B(v256, v256);
// CHECK: @llvm.hexagon.V6.vsubbnq.128B
__builtin_HEXAGON_V6_vsubbnq_128B(q128, v128, v128);
__builtin_HEXAGON_V6_vsubbnq_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.vsubbq.128B
__builtin_HEXAGON_V6_vsubbq_128B(q128, v128, v128);
__builtin_HEXAGON_V6_vsubbq_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.vsubbsat.128B
__builtin_HEXAGON_V6_vsubbsat_128B(v128, v128);
// CHECK: @llvm.hexagon.V6.vsubbsat.dv.128B
@@ -729,9 +732,9 @@ void test() {
// CHECK: @llvm.hexagon.V6.vsubh.dv.128B
__builtin_HEXAGON_V6_vsubh_dv_128B(v256, v256);
// CHECK: @llvm.hexagon.V6.vsubhnq.128B
__builtin_HEXAGON_V6_vsubhnq_128B(q128, v128, v128);
__builtin_HEXAGON_V6_vsubhnq_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.vsubhq.128B
__builtin_HEXAGON_V6_vsubhq_128B(q128, v128, v128);
__builtin_HEXAGON_V6_vsubhq_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.vsubhsat.128B
__builtin_HEXAGON_V6_vsubhsat_128B(v128, v128);
// CHECK: @llvm.hexagon.V6.vsubhsat.dv.128B
@@ -761,15 +764,15 @@ void test() {
// CHECK: @llvm.hexagon.V6.vsubw.dv.128B
__builtin_HEXAGON_V6_vsubw_dv_128B(v256, v256);
// CHECK: @llvm.hexagon.V6.vsubwnq.128B
__builtin_HEXAGON_V6_vsubwnq_128B(q128, v128, v128);
__builtin_HEXAGON_V6_vsubwnq_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.vsubwq.128B
__builtin_HEXAGON_V6_vsubwq_128B(q128, v128, v128);
__builtin_HEXAGON_V6_vsubwq_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.vsubwsat.128B
__builtin_HEXAGON_V6_vsubwsat_128B(v128, v128);
// CHECK: @llvm.hexagon.V6.vsubwsat.dv.128B
__builtin_HEXAGON_V6_vsubwsat_dv_128B(v256, v256);
// CHECK: @llvm.hexagon.V6.vswap.128B
__builtin_HEXAGON_V6_vswap_128B(q128, v128, v128);
__builtin_HEXAGON_V6_vswap_128B(__builtin_HEXAGON_V6_vandvrt_128B(q128, -1), v128, v128);
// CHECK: @llvm.hexagon.V6.vtmpyb.128B
__builtin_HEXAGON_V6_vtmpyb_128B(v256, 0);
// CHECK: @llvm.hexagon.V6.vtmpyb.acc.128B

@@ -6,6 +6,17 @@ void test() {
int v64 __attribute__((__vector_size__(64)));
int v128 __attribute__((__vector_size__(128)));
// These are special and ugly: they take an HVX vector in place of
// the HVX vector predicate.
// CHECK: @llvm.hexagon.V6.vmaskedstorenq
__builtin_HEXAGON_V6_vmaskedstorenq(q64, 0, v64);
// CHECK: @llvm.hexagon.V6.vmaskedstorentnq
__builtin_HEXAGON_V6_vmaskedstorentnq(q64, 0, v64);
// CHECK: @llvm.hexagon.V6.vmaskedstorentq
__builtin_HEXAGON_V6_vmaskedstorentq(q64, 0, v64);
// CHECK: @llvm.hexagon.V6.vmaskedstoreq
__builtin_HEXAGON_V6_vmaskedstoreq(q64, 0, v64);
// CHECK: @llvm.hexagon.V6.extractw
__builtin_HEXAGON_V6_extractw(v64, 0);
// CHECK: @llvm.hexagon.V6.hi
@@ -19,33 +30,33 @@ void test() {
// CHECK: @llvm.hexagon.V6.lvsplatw
__builtin_HEXAGON_V6_lvsplatw(0);
// CHECK: @llvm.hexagon.V6.pred.and
__builtin_HEXAGON_V6_pred_and(q64, q64);
__builtin_HEXAGON_V6_pred_and(__builtin_HEXAGON_V6_vandvrt(q64, -1), __builtin_HEXAGON_V6_vandvrt(q64, -1));
// CHECK: @llvm.hexagon.V6.pred.and.n
__builtin_HEXAGON_V6_pred_and_n(q64, q64);
__builtin_HEXAGON_V6_pred_and_n(__builtin_HEXAGON_V6_vandvrt(q64, -1), __builtin_HEXAGON_V6_vandvrt(q64, -1));
// CHECK: @llvm.hexagon.V6.pred.not
__builtin_HEXAGON_V6_pred_not(q64);
__builtin_HEXAGON_V6_pred_not(__builtin_HEXAGON_V6_vandvrt(q64, -1));
// CHECK: @llvm.hexagon.V6.pred.or
__builtin_HEXAGON_V6_pred_or(q64, q64);
__builtin_HEXAGON_V6_pred_or(__builtin_HEXAGON_V6_vandvrt(q64, -1), __builtin_HEXAGON_V6_vandvrt(q64, -1));
// CHECK: @llvm.hexagon.V6.pred.or.n
__builtin_HEXAGON_V6_pred_or_n(q64, q64);
__builtin_HEXAGON_V6_pred_or_n(__builtin_HEXAGON_V6_vandvrt(q64, -1), __builtin_HEXAGON_V6_vandvrt(q64, -1));
// CHECK: @llvm.hexagon.V6.pred.scalar2
__builtin_HEXAGON_V6_pred_scalar2(0);
// CHECK: @llvm.hexagon.V6.pred.scalar2v2
__builtin_HEXAGON_V6_pred_scalar2v2(0);
// CHECK: @llvm.hexagon.V6.pred.xor
__builtin_HEXAGON_V6_pred_xor(q64, q64);
__builtin_HEXAGON_V6_pred_xor(__builtin_HEXAGON_V6_vandvrt(q64, -1), __builtin_HEXAGON_V6_vandvrt(q64, -1));
// CHECK: @llvm.hexagon.V6.shuffeqh
__builtin_HEXAGON_V6_shuffeqh(q64, q64);
__builtin_HEXAGON_V6_shuffeqh(__builtin_HEXAGON_V6_vandvrt(q64, -1), __builtin_HEXAGON_V6_vandvrt(q64, -1));
// CHECK: @llvm.hexagon.V6.shuffeqw
__builtin_HEXAGON_V6_shuffeqw(q64, q64);
__builtin_HEXAGON_V6_shuffeqw(__builtin_HEXAGON_V6_vandvrt(q64, -1), __builtin_HEXAGON_V6_vandvrt(q64, -1));
// CHECK: @llvm.hexagon.V6.vS32b.nqpred.ai
__builtin_HEXAGON_V6_vS32b_nqpred_ai(q64, 0, v64);
__builtin_HEXAGON_V6_vS32b_nqpred_ai(__builtin_HEXAGON_V6_vandvrt(q64, -1), 0, v64);
// CHECK: @llvm.hexagon.V6.vS32b.nt.nqpred.ai
__builtin_HEXAGON_V6_vS32b_nt_nqpred_ai(q64, 0, v64);
__builtin_HEXAGON_V6_vS32b_nt_nqpred_ai(__builtin_HEXAGON_V6_vandvrt(q64, -1), 0, v64);
// CHECK: @llvm.hexagon.V6.vS32b.nt.qpred.ai
__builtin_HEXAGON_V6_vS32b_nt_qpred_ai(q64, 0, v64);
__builtin_HEXAGON_V6_vS32b_nt_qpred_ai(__builtin_HEXAGON_V6_vandvrt(q64, -1), 0, v64);
// CHECK: @llvm.hexagon.V6.vS32b.qpred.ai
__builtin_HEXAGON_V6_vS32b_qpred_ai(q64, 0, v64);
__builtin_HEXAGON_V6_vS32b_qpred_ai(__builtin_HEXAGON_V6_vandvrt(q64, -1), 0, v64);
// CHECK: @llvm.hexagon.V6.vabsb
__builtin_HEXAGON_V6_vabsb(v64);
// CHECK: @llvm.hexagon.V6.vabsb.sat
@@ -71,9 +82,9 @@ void test() {
// CHECK: @llvm.hexagon.V6.vaddb.dv
__builtin_HEXAGON_V6_vaddb_dv(v128, v128);
// CHECK: @llvm.hexagon.V6.vaddbnq
__builtin_HEXAGON_V6_vaddbnq(q64, v64, v64);
__builtin_HEXAGON_V6_vaddbnq(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.vaddbq
__builtin_HEXAGON_V6_vaddbq(q64, v64, v64);
__builtin_HEXAGON_V6_vaddbq(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.vaddbsat
__builtin_HEXAGON_V6_vaddbsat(v64, v64);
// CHECK: @llvm.hexagon.V6.vaddbsat.dv
@@ -89,9 +100,9 @@ void test() {
// CHECK: @llvm.hexagon.V6.vaddh.dv
__builtin_HEXAGON_V6_vaddh_dv(v128, v128);
// CHECK: @llvm.hexagon.V6.vaddhnq
__builtin_HEXAGON_V6_vaddhnq(q64, v64, v64);
__builtin_HEXAGON_V6_vaddhnq(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.vaddhq
__builtin_HEXAGON_V6_vaddhq(q64, v64, v64);
__builtin_HEXAGON_V6_vaddhq(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.vaddhsat
__builtin_HEXAGON_V6_vaddhsat(v64, v64);
// CHECK: @llvm.hexagon.V6.vaddhsat.dv
@@ -127,9 +138,9 @@ void test() {
// CHECK: @llvm.hexagon.V6.vaddw.dv
__builtin_HEXAGON_V6_vaddw_dv(v128, v128);
// CHECK: @llvm.hexagon.V6.vaddwnq
__builtin_HEXAGON_V6_vaddwnq(q64, v64, v64);
__builtin_HEXAGON_V6_vaddwnq(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.vaddwq
__builtin_HEXAGON_V6_vaddwq(q64, v64, v64);
__builtin_HEXAGON_V6_vaddwq(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.vaddwsat
__builtin_HEXAGON_V6_vaddwsat(v64, v64);
// CHECK: @llvm.hexagon.V6.vaddwsat.dv
@@ -141,21 +152,21 @@ void test() {
// CHECK: @llvm.hexagon.V6.vand
__builtin_HEXAGON_V6_vand(v64, v64);
// CHECK: @llvm.hexagon.V6.vandnqrt
__builtin_HEXAGON_V6_vandnqrt(q64, 0);
__builtin_HEXAGON_V6_vandnqrt(__builtin_HEXAGON_V6_vandvrt(q64, -1), 0);
// CHECK: @llvm.hexagon.V6.vandnqrt.acc
__builtin_HEXAGON_V6_vandnqrt_acc(v64, q64, 0);
__builtin_HEXAGON_V6_vandnqrt_acc(v64, __builtin_HEXAGON_V6_vandvrt(q64, -1), 0);
// CHECK: @llvm.hexagon.V6.vandqrt
__builtin_HEXAGON_V6_vandqrt(q64, 0);
__builtin_HEXAGON_V6_vandqrt(__builtin_HEXAGON_V6_vandvrt(q64, -1), 0);
// CHECK: @llvm.hexagon.V6.vandqrt.acc
__builtin_HEXAGON_V6_vandqrt_acc(v64, q64, 0);
__builtin_HEXAGON_V6_vandqrt_acc(v64, __builtin_HEXAGON_V6_vandvrt(q64, -1), 0);
// CHECK: @llvm.hexagon.V6.vandvnqv
__builtin_HEXAGON_V6_vandvnqv(q64, v64);
__builtin_HEXAGON_V6_vandvnqv(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64);
// CHECK: @llvm.hexagon.V6.vandvqv
__builtin_HEXAGON_V6_vandvqv(q64, v64);
__builtin_HEXAGON_V6_vandvqv(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64);
// CHECK: @llvm.hexagon.V6.vandvrt
__builtin_HEXAGON_V6_vandvrt(v64, 0);
// CHECK: @llvm.hexagon.V6.vandvrt.acc
__builtin_HEXAGON_V6_vandvrt_acc(q64, v64, 0);
__builtin_HEXAGON_V6_vandvrt_acc(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, 0);
// CHECK: @llvm.hexagon.V6.vaslh
__builtin_HEXAGON_V6_vaslh(v64, 0);
// CHECK: @llvm.hexagon.V6.vaslh.acc
@@ -297,87 +308,87 @@ void test() {
// CHECK: @llvm.hexagon.V6.veqb
__builtin_HEXAGON_V6_veqb(v64, v64);
// CHECK: @llvm.hexagon.V6.veqb.and
__builtin_HEXAGON_V6_veqb_and(q64, v64, v64);
__builtin_HEXAGON_V6_veqb_and(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.veqb.or
__builtin_HEXAGON_V6_veqb_or(q64, v64, v64);
__builtin_HEXAGON_V6_veqb_or(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.veqb.xor
__builtin_HEXAGON_V6_veqb_xor(q64, v64, v64);
__builtin_HEXAGON_V6_veqb_xor(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.veqh
__builtin_HEXAGON_V6_veqh(v64, v64);
// CHECK: @llvm.hexagon.V6.veqh.and
__builtin_HEXAGON_V6_veqh_and(q64, v64, v64);
__builtin_HEXAGON_V6_veqh_and(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.veqh.or
__builtin_HEXAGON_V6_veqh_or(q64, v64, v64);
__builtin_HEXAGON_V6_veqh_or(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.veqh.xor
__builtin_HEXAGON_V6_veqh_xor(q64, v64, v64);
__builtin_HEXAGON_V6_veqh_xor(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.veqw
__builtin_HEXAGON_V6_veqw(v64, v64);
// CHECK: @llvm.hexagon.V6.veqw.and
__builtin_HEXAGON_V6_veqw_and(q64, v64, v64);
__builtin_HEXAGON_V6_veqw_and(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.veqw.or
__builtin_HEXAGON_V6_veqw_or(q64, v64, v64);
__builtin_HEXAGON_V6_veqw_or(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.veqw.xor
__builtin_HEXAGON_V6_veqw_xor(q64, v64, v64);
__builtin_HEXAGON_V6_veqw_xor(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.vgathermh
__builtin_HEXAGON_V6_vgathermh(0, 0, 0, v64);
// CHECK: @llvm.hexagon.V6.vgathermhq
__builtin_HEXAGON_V6_vgathermhq(0, q64, 0, 0, v64);
__builtin_HEXAGON_V6_vgathermhq(0, __builtin_HEXAGON_V6_vandvrt(q64, -1), 0, 0, v64);
// CHECK: @llvm.hexagon.V6.vgathermhw
__builtin_HEXAGON_V6_vgathermhw(0, 0, 0, v128);
// CHECK: @llvm.hexagon.V6.vgathermhwq
__builtin_HEXAGON_V6_vgathermhwq(0, q64, 0, 0, v128);
__builtin_HEXAGON_V6_vgathermhwq(0, __builtin_HEXAGON_V6_vandvrt(q64, -1), 0, 0, v128);
// CHECK: @llvm.hexagon.V6.vgathermw
__builtin_HEXAGON_V6_vgathermw(0, 0, 0, v64);
// CHECK: @llvm.hexagon.V6.vgathermwq
__builtin_HEXAGON_V6_vgathermwq(0, q64, 0, 0, v64);
__builtin_HEXAGON_V6_vgathermwq(0, __builtin_HEXAGON_V6_vandvrt(q64, -1), 0, 0, v64);
// CHECK: @llvm.hexagon.V6.vgtb
__builtin_HEXAGON_V6_vgtb(v64, v64);
// CHECK: @llvm.hexagon.V6.vgtb.and
__builtin_HEXAGON_V6_vgtb_and(q64, v64, v64);
__builtin_HEXAGON_V6_vgtb_and(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.vgtb.or
__builtin_HEXAGON_V6_vgtb_or(q64, v64, v64);
__builtin_HEXAGON_V6_vgtb_or(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.vgtb.xor
__builtin_HEXAGON_V6_vgtb_xor(q64, v64, v64);
__builtin_HEXAGON_V6_vgtb_xor(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.vgth
__builtin_HEXAGON_V6_vgth(v64, v64);
// CHECK: @llvm.hexagon.V6.vgth.and
__builtin_HEXAGON_V6_vgth_and(q64, v64, v64);
__builtin_HEXAGON_V6_vgth_and(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.vgth.or
__builtin_HEXAGON_V6_vgth_or(q64, v64, v64);
__builtin_HEXAGON_V6_vgth_or(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.vgth.xor
__builtin_HEXAGON_V6_vgth_xor(q64, v64, v64);
__builtin_HEXAGON_V6_vgth_xor(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.vgtub
__builtin_HEXAGON_V6_vgtub(v64, v64);
// CHECK: @llvm.hexagon.V6.vgtub.and
__builtin_HEXAGON_V6_vgtub_and(q64, v64, v64);
__builtin_HEXAGON_V6_vgtub_and(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.vgtub.or
__builtin_HEXAGON_V6_vgtub_or(q64, v64, v64);
__builtin_HEXAGON_V6_vgtub_or(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.vgtub.xor
__builtin_HEXAGON_V6_vgtub_xor(q64, v64, v64);
__builtin_HEXAGON_V6_vgtub_xor(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.vgtuh
__builtin_HEXAGON_V6_vgtuh(v64, v64);
// CHECK: @llvm.hexagon.V6.vgtuh.and
__builtin_HEXAGON_V6_vgtuh_and(q64, v64, v64);
__builtin_HEXAGON_V6_vgtuh_and(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.vgtuh.or
__builtin_HEXAGON_V6_vgtuh_or(q64, v64, v64);
__builtin_HEXAGON_V6_vgtuh_or(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.vgtuh.xor
__builtin_HEXAGON_V6_vgtuh_xor(q64, v64, v64);
__builtin_HEXAGON_V6_vgtuh_xor(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.vgtuw
__builtin_HEXAGON_V6_vgtuw(v64, v64);
// CHECK: @llvm.hexagon.V6.vgtuw.and
__builtin_HEXAGON_V6_vgtuw_and(q64, v64, v64);
__builtin_HEXAGON_V6_vgtuw_and(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.vgtuw.or
__builtin_HEXAGON_V6_vgtuw_or(q64, v64, v64);
__builtin_HEXAGON_V6_vgtuw_or(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.vgtuw.xor
__builtin_HEXAGON_V6_vgtuw_xor(q64, v64, v64);
__builtin_HEXAGON_V6_vgtuw_xor(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.vgtw
__builtin_HEXAGON_V6_vgtw(v64, v64);
// CHECK: @llvm.hexagon.V6.vgtw.and
__builtin_HEXAGON_V6_vgtw_and(q64, v64, v64);
__builtin_HEXAGON_V6_vgtw_and(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.vgtw.or
__builtin_HEXAGON_V6_vgtw_or(q64, v64, v64);
__builtin_HEXAGON_V6_vgtw_or(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.vgtw.xor
__builtin_HEXAGON_V6_vgtw_xor(q64, v64, v64);
__builtin_HEXAGON_V6_vgtw_xor(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.vinsertwr
__builtin_HEXAGON_V6_vinsertwr(v64, 0);
// CHECK: @llvm.hexagon.V6.vlalignb
@@ -416,14 +427,6 @@ void test() {
__builtin_HEXAGON_V6_vlutvwh_oracci(v128, v64, v64, 0);
// CHECK: @llvm.hexagon.V6.vlutvwhi
__builtin_HEXAGON_V6_vlutvwhi(v64, v64, 0);
// CHECK: @llvm.hexagon.V6.vmaskedstorenq
__builtin_HEXAGON_V6_vmaskedstorenq(q64, 0, v64);
// CHECK: @llvm.hexagon.V6.vmaskedstorentnq
__builtin_HEXAGON_V6_vmaskedstorentnq(q64, 0, v64);
// CHECK: @llvm.hexagon.V6.vmaskedstorentq
__builtin_HEXAGON_V6_vmaskedstorentq(q64, 0, v64);
// CHECK: @llvm.hexagon.V6.vmaskedstoreq
__builtin_HEXAGON_V6_vmaskedstoreq(q64, 0, v64);
// CHECK: @llvm.hexagon.V6.vmaxb
__builtin_HEXAGON_V6_vmaxb(v64, v64);
// CHECK: @llvm.hexagon.V6.vmaxh
@@ -567,7 +570,7 @@ void test() {
// CHECK: @llvm.hexagon.V6.vmpyuhv.acc
__builtin_HEXAGON_V6_vmpyuhv_acc(v128, v64, v64);
// CHECK: @llvm.hexagon.V6.vmux
__builtin_HEXAGON_V6_vmux(q64, v64, v64);
__builtin_HEXAGON_V6_vmux(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.vnavgb
__builtin_HEXAGON_V6_vnavgb(v64, v64);
// CHECK: @llvm.hexagon.V6.vnavgh
@@ -603,11 +606,11 @@ void test() {
// CHECK: @llvm.hexagon.V6.vpopcounth
__builtin_HEXAGON_V6_vpopcounth(v64);
// CHECK: @llvm.hexagon.V6.vprefixqb
__builtin_HEXAGON_V6_vprefixqb(q64);
__builtin_HEXAGON_V6_vprefixqb(__builtin_HEXAGON_V6_vandvrt(q64, -1));
// CHECK: @llvm.hexagon.V6.vprefixqh
__builtin_HEXAGON_V6_vprefixqh(q64);
__builtin_HEXAGON_V6_vprefixqh(__builtin_HEXAGON_V6_vandvrt(q64, -1));
// CHECK: @llvm.hexagon.V6.vprefixqw
__builtin_HEXAGON_V6_vprefixqw(q64);
__builtin_HEXAGON_V6_vprefixqw(__builtin_HEXAGON_V6_vandvrt(q64, -1));
// CHECK: @llvm.hexagon.V6.vrdelta
__builtin_HEXAGON_V6_vrdelta(v64, v64);
// CHECK: @llvm.hexagon.V6.vrmpybub.rtt
@@ -677,19 +680,19 @@ void test() {
// CHECK: @llvm.hexagon.V6.vscattermh.add
__builtin_HEXAGON_V6_vscattermh_add(0, 0, v64, v64);
// CHECK: @llvm.hexagon.V6.vscattermhq
__builtin_HEXAGON_V6_vscattermhq(q64, 0, 0, v64, v64);
__builtin_HEXAGON_V6_vscattermhq(__builtin_HEXAGON_V6_vandvrt(q64, -1), 0, 0, v64, v64);
// CHECK: @llvm.hexagon.V6.vscattermhw
__builtin_HEXAGON_V6_vscattermhw(0, 0, v128, v64);
// CHECK: @llvm.hexagon.V6.vscattermhw.add
__builtin_HEXAGON_V6_vscattermhw_add(0, 0, v128, v64);
// CHECK: @llvm.hexagon.V6.vscattermhwq
__builtin_HEXAGON_V6_vscattermhwq(q64, 0, 0, v128, v64);
__builtin_HEXAGON_V6_vscattermhwq(__builtin_HEXAGON_V6_vandvrt(q64, -1), 0, 0, v128, v64);
// CHECK: @llvm.hexagon.V6.vscattermw
__builtin_HEXAGON_V6_vscattermw(0, 0, v64, v64);
// CHECK: @llvm.hexagon.V6.vscattermw.add
__builtin_HEXAGON_V6_vscattermw_add(0, 0, v64, v64);
// CHECK: @llvm.hexagon.V6.vscattermwq
__builtin_HEXAGON_V6_vscattermwq(q64, 0, 0, v64, v64);
__builtin_HEXAGON_V6_vscattermwq(__builtin_HEXAGON_V6_vandvrt(q64, -1), 0, 0, v64, v64);
// CHECK: @llvm.hexagon.V6.vsh
__builtin_HEXAGON_V6_vsh(v64);
// CHECK: @llvm.hexagon.V6.vshufeh
@@ -715,9 +718,9 @@ void test() {
// CHECK: @llvm.hexagon.V6.vsubb.dv
__builtin_HEXAGON_V6_vsubb_dv(v128, v128);
// CHECK: @llvm.hexagon.V6.vsubbnq
__builtin_HEXAGON_V6_vsubbnq(q64, v64, v64);
__builtin_HEXAGON_V6_vsubbnq(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.vsubbq
__builtin_HEXAGON_V6_vsubbq(q64, v64, v64);
__builtin_HEXAGON_V6_vsubbq(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.vsubbsat
__builtin_HEXAGON_V6_vsubbsat(v64, v64);
// CHECK: @llvm.hexagon.V6.vsubbsat.dv
@@ -729,9 +732,9 @@ void test() {
// CHECK: @llvm.hexagon.V6.vsubh.dv
__builtin_HEXAGON_V6_vsubh_dv(v128, v128);
// CHECK: @llvm.hexagon.V6.vsubhnq
__builtin_HEXAGON_V6_vsubhnq(q64, v64, v64);
__builtin_HEXAGON_V6_vsubhnq(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.vsubhq
__builtin_HEXAGON_V6_vsubhq(q64, v64, v64);
__builtin_HEXAGON_V6_vsubhq(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.vsubhsat
__builtin_HEXAGON_V6_vsubhsat(v64, v64);
// CHECK: @llvm.hexagon.V6.vsubhsat.dv
@@ -761,15 +764,15 @@ void test() {
// CHECK: @llvm.hexagon.V6.vsubw.dv
__builtin_HEXAGON_V6_vsubw_dv(v128, v128);
// CHECK: @llvm.hexagon.V6.vsubwnq
__builtin_HEXAGON_V6_vsubwnq(q64, v64, v64);
__builtin_HEXAGON_V6_vsubwnq(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.vsubwq
__builtin_HEXAGON_V6_vsubwq(q64, v64, v64);
__builtin_HEXAGON_V6_vsubwq(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.vsubwsat
__builtin_HEXAGON_V6_vsubwsat(v64, v64);
// CHECK: @llvm.hexagon.V6.vsubwsat.dv
__builtin_HEXAGON_V6_vsubwsat_dv(v128, v128);
// CHECK: @llvm.hexagon.V6.vswap
__builtin_HEXAGON_V6_vswap(q64, v64, v64);
__builtin_HEXAGON_V6_vswap(__builtin_HEXAGON_V6_vandvrt(q64, -1), v64, v64);
// CHECK: @llvm.hexagon.V6.vtmpyb
__builtin_HEXAGON_V6_vtmpyb(v128, 0);
// CHECK: @llvm.hexagon.V6.vtmpyb.acc

File diff suppressed because it is too large.