diff --git a/clang/include/clang/Basic/riscv_vector_xtheadv.td b/clang/include/clang/Basic/riscv_vector_xtheadv.td index 0fd9b690c2f24a..fb79bbbb8e2617 100644 --- a/clang/include/clang/Basic/riscv_vector_xtheadv.td +++ b/clang/include/clang/Basic/riscv_vector_xtheadv.td @@ -187,6 +187,20 @@ let Log2LMUL = [-3, -2, -1, 0, 1, 2] in { ["wx", "Uv", "UvUwz"]]>; } +multiclass RVVSignedMaskOutBuiltinSet + : RVVOp0Op1BuiltinSet; + +multiclass RVVUnsignedMaskOutBuiltinSet + : RVVOp0Op1BuiltinSet; + +multiclass RVVIntMaskOutBuiltinSet + : RVVSignedMaskOutBuiltinSet, + RVVUnsignedMaskOutBuiltinSet; + //===----------------------------------------------------------------------===// // 6. Configuration-Setting and Utility //===----------------------------------------------------------------------===// @@ -1030,6 +1044,19 @@ let UnMaskedPolicyScheme = HasPassthruOperand in { } // 12.7. Vector Integer Comparison Operations +let MaskedPolicyScheme = HasPassthruOperand, + HasTailPolicy = false in { + defm th_vmseq : RVVIntMaskOutBuiltinSet; + defm th_vmsne : RVVIntMaskOutBuiltinSet; + defm th_vmsltu : RVVUnsignedMaskOutBuiltinSet; + defm th_vmslt : RVVSignedMaskOutBuiltinSet; + defm th_vmsleu : RVVUnsignedMaskOutBuiltinSet; + defm th_vmsle : RVVSignedMaskOutBuiltinSet; + defm th_vmsgtu : RVVUnsignedMaskOutBuiltinSet; + defm th_vmsgt : RVVSignedMaskOutBuiltinSet; + defm th_vmsgeu : RVVUnsignedMaskOutBuiltinSet; + defm th_vmsge : RVVSignedMaskOutBuiltinSet; +} // 12.8. Vector Integer Min/Max Operations let MaskedPolicyScheme = HasPassthruOperand, diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmseq.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmseq.c new file mode 100644 index 00000000000000..b6fdfd8a16384c --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmseq.c @@ -0,0 +1,1287 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include <riscv_vector.h> + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i8m1_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i8.nxv8i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmseq_vv_i8m1_b8(vint8m1_t op1, vint8m1_t op2, size_t vl) { + return __riscv_th_vmseq_vv_i8m1_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i8m1_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmseq_vx_i8m1_b8(vint8m1_t op1, int8_t op2, size_t vl) { + return __riscv_th_vmseq_vx_i8m1_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i8m2_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv16i8.nxv16i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmseq_vv_i8m2_b4(vint8m2_t op1, vint8m2_t op2, size_t vl) { + return
__riscv_th_vmseq_vv_i8m2_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i8m2_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv16i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmseq_vx_i8m2_b4(vint8m2_t op1, int8_t op2, size_t vl) { + return __riscv_th_vmseq_vx_i8m2_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i8m4_b2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv32i8.nxv32i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmseq_vv_i8m4_b2(vint8m4_t op1, vint8m4_t op2, size_t vl) { + return __riscv_th_vmseq_vv_i8m4_b2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i8m4_b2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv32i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmseq_vx_i8m4_b2(vint8m4_t op1, int8_t op2, size_t vl) { + return __riscv_th_vmseq_vx_i8m4_b2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i8m8_b1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv64i8.nxv64i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool1_t test_vmseq_vv_i8m8_b1(vint8m8_t op1, vint8m8_t op2, size_t vl) { + return __riscv_th_vmseq_vv_i8m8_b1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i8m8_b1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv64i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool1_t test_vmseq_vx_i8m8_b1(vint8m8_t op1, int8_t op2, size_t vl) { + return __riscv_th_vmseq_vx_i8m8_b1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i16m1_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv4i16.nxv4i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmseq_vv_i16m1_b16(vint16m1_t op1, vint16m1_t op2, size_t vl) { + return __riscv_th_vmseq_vv_i16m1_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i16m1_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv4i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmseq_vx_i16m1_b16(vint16m1_t op1, int16_t op2, size_t vl) { + return __riscv_th_vmseq_vx_i16m1_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i16m2_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i16.nxv8i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmseq_vv_i16m2_b8(vint16m2_t op1, vint16m2_t op2, size_t vl) { + return __riscv_th_vmseq_vv_i16m2_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i16m2_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmseq_vx_i16m2_b8(vint16m2_t op1, int16_t op2, size_t vl) { + return __riscv_th_vmseq_vx_i16m2_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i16m4_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv16i16.nxv16i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmseq_vv_i16m4_b4(vint16m4_t op1, vint16m4_t op2, size_t vl) { + return __riscv_th_vmseq_vv_i16m4_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i16m4_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv16i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmseq_vx_i16m4_b4(vint16m4_t op1, int16_t op2, size_t vl) { + return __riscv_th_vmseq_vx_i16m4_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i16m8_b2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv32i16.nxv32i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmseq_vv_i16m8_b2(vint16m8_t op1, vint16m8_t op2, size_t vl) { + return __riscv_th_vmseq_vv_i16m8_b2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i16m8_b2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv32i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmseq_vx_i16m8_b2(vint16m8_t op1, int16_t op2, size_t vl) { + return __riscv_th_vmseq_vx_i16m8_b2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i32m1_b32 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv2i32.nxv2i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmseq_vv_i32m1_b32(vint32m1_t op1, vint32m1_t op2, size_t vl) { + return __riscv_th_vmseq_vv_i32m1_b32(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i32m1_b32 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv2i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// 
+vbool32_t test_vmseq_vx_i32m1_b32(vint32m1_t op1, int32_t op2, size_t vl) { + return __riscv_th_vmseq_vx_i32m1_b32(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i32m2_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv4i32.nxv4i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmseq_vv_i32m2_b16(vint32m2_t op1, vint32m2_t op2, size_t vl) { + return __riscv_th_vmseq_vv_i32m2_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i32m2_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv4i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmseq_vx_i32m2_b16(vint32m2_t op1, int32_t op2, size_t vl) { + return __riscv_th_vmseq_vx_i32m2_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i32m4_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i32.nxv8i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmseq_vv_i32m4_b8(vint32m4_t op1, vint32m4_t op2, size_t vl) { + return __riscv_th_vmseq_vv_i32m4_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i32m4_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmseq_vx_i32m4_b8(vint32m4_t op1, int32_t op2, size_t vl) { + return __riscv_th_vmseq_vx_i32m4_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i32m8_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv16i32.nxv16i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmseq_vv_i32m8_b4(vint32m8_t op1, vint32m8_t op2, size_t vl) { + return __riscv_th_vmseq_vv_i32m8_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i32m8_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv16i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmseq_vx_i32m8_b4(vint32m8_t op1, int32_t op2, size_t vl) { + return __riscv_th_vmseq_vx_i32m8_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i64m1_b64 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv1i64.nxv1i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool64_t test_vmseq_vv_i64m1_b64(vint64m1_t op1, vint64m1_t op2, size_t vl) { + return __riscv_th_vmseq_vv_i64m1_b64(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local 
@test_vmseq_vx_i64m1_b64 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv1i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool64_t test_vmseq_vx_i64m1_b64(vint64m1_t op1, int64_t op2, size_t vl) { + return __riscv_th_vmseq_vx_i64m1_b64(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i64m2_b32 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv2i64.nxv2i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmseq_vv_i64m2_b32(vint64m2_t op1, vint64m2_t op2, size_t vl) { + return __riscv_th_vmseq_vv_i64m2_b32(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i64m2_b32 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv2i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmseq_vx_i64m2_b32(vint64m2_t op1, int64_t op2, size_t vl) { + return __riscv_th_vmseq_vx_i64m2_b32(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i64m4_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv4i64.nxv4i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmseq_vv_i64m4_b16(vint64m4_t op1, vint64m4_t op2, size_t vl) { + return __riscv_th_vmseq_vv_i64m4_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i64m4_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv4i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmseq_vx_i64m4_b16(vint64m4_t op1, int64_t op2, size_t vl) { + return __riscv_th_vmseq_vx_i64m4_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i64m8_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i64.nxv8i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmseq_vv_i64m8_b8(vint64m8_t op1, vint64m8_t op2, size_t vl) { + return __riscv_th_vmseq_vv_i64m8_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i64m8_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmseq_vx_i64m8_b8(vint64m8_t op1, int64_t op2, size_t vl) { + return __riscv_th_vmseq_vx_i64m8_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u8m1_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmseq.nxv8i8.nxv8i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmseq_vv_u8m1_b8(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { + return __riscv_th_vmseq_vv_u8m1_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u8m1_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmseq_vx_u8m1_b8(vuint8m1_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vmseq_vx_u8m1_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u8m2_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv16i8.nxv16i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmseq_vv_u8m2_b4(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { + return __riscv_th_vmseq_vv_u8m2_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u8m2_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv16i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmseq_vx_u8m2_b4(vuint8m2_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vmseq_vx_u8m2_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u8m4_b2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv32i8.nxv32i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmseq_vv_u8m4_b2(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { + return __riscv_th_vmseq_vv_u8m4_b2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u8m4_b2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv32i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmseq_vx_u8m4_b2(vuint8m4_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vmseq_vx_u8m4_b2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u8m8_b1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv64i8.nxv64i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool1_t test_vmseq_vv_u8m8_b1(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { + return __riscv_th_vmseq_vv_u8m8_b1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u8m8_b1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv64i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool1_t test_vmseq_vx_u8m8_b1(vuint8m8_t op1, uint8_t op2, size_t vl) { + return 
__riscv_th_vmseq_vx_u8m8_b1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u16m1_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv4i16.nxv4i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmseq_vv_u16m1_b16(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { + return __riscv_th_vmseq_vv_u16m1_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u16m1_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv4i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmseq_vx_u16m1_b16(vuint16m1_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vmseq_vx_u16m1_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u16m2_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i16.nxv8i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmseq_vv_u16m2_b8(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { + return __riscv_th_vmseq_vv_u16m2_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u16m2_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmseq_vx_u16m2_b8(vuint16m2_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vmseq_vx_u16m2_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u16m4_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv16i16.nxv16i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmseq_vv_u16m4_b4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { + return __riscv_th_vmseq_vv_u16m4_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u16m4_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv16i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmseq_vx_u16m4_b4(vuint16m4_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vmseq_vx_u16m4_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u16m8_b2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv32i16.nxv32i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmseq_vv_u16m8_b2(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { + return __riscv_th_vmseq_vv_u16m8_b2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u16m8_b2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], 
i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv32i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmseq_vx_u16m8_b2(vuint16m8_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vmseq_vx_u16m8_b2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u32m1_b32 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv2i32.nxv2i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmseq_vv_u32m1_b32(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { + return __riscv_th_vmseq_vv_u32m1_b32(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u32m1_b32 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv2i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmseq_vx_u32m1_b32(vuint32m1_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vmseq_vx_u32m1_b32(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u32m2_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv4i32.nxv4i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmseq_vv_u32m2_b16(vuint32m2_t op1, vuint32m2_t op2, size_t vl) { + return __riscv_th_vmseq_vv_u32m2_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u32m2_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv4i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmseq_vx_u32m2_b16(vuint32m2_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vmseq_vx_u32m2_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u32m4_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i32.nxv8i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmseq_vv_u32m4_b8(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { + return __riscv_th_vmseq_vv_u32m4_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u32m4_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmseq_vx_u32m4_b8(vuint32m4_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vmseq_vx_u32m4_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u32m8_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv16i32.nxv16i32.i64( [[OP1]], [[OP2]], i64 
[[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmseq_vv_u32m8_b4(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { + return __riscv_th_vmseq_vv_u32m8_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u32m8_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv16i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmseq_vx_u32m8_b4(vuint32m8_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vmseq_vx_u32m8_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u64m1_b64 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv1i64.nxv1i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool64_t test_vmseq_vv_u64m1_b64(vuint64m1_t op1, vuint64m1_t op2, size_t vl) { + return __riscv_th_vmseq_vv_u64m1_b64(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u64m1_b64 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv1i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool64_t test_vmseq_vx_u64m1_b64(vuint64m1_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vmseq_vx_u64m1_b64(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u64m2_b32 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv2i64.nxv2i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmseq_vv_u64m2_b32(vuint64m2_t op1, vuint64m2_t op2, size_t vl) { + return __riscv_th_vmseq_vv_u64m2_b32(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u64m2_b32 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv2i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmseq_vx_u64m2_b32(vuint64m2_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vmseq_vx_u64m2_b32(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u64m4_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv4i64.nxv4i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmseq_vv_u64m4_b16(vuint64m4_t op1, vuint64m4_t op2, size_t vl) { + return __riscv_th_vmseq_vv_u64m4_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u64m4_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv4i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmseq_vx_u64m4_b16(vuint64m4_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vmseq_vx_u64m4_b16(op1, op2, 
vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u64m8_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i64.nxv8i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmseq_vv_u64m8_b8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) { + return __riscv_th_vmseq_vv_u64m8_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u64m8_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.nxv8i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmseq_vx_u64m8_b8(vuint64m8_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vmseq_vx_u64m8_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i8m1_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmseq_vv_i8m1_b8_m(vbool8_t mask, vint8m1_t op1, vint8m1_t op2, size_t vl) { + return __riscv_th_vmseq_vv_i8m1_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i8m1_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmseq_vx_i8m1_b8_m(vbool8_t mask, vint8m1_t op1, int8_t op2, size_t vl) { + return __riscv_th_vmseq_vx_i8m1_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i8m2_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmseq_vv_i8m2_b4_m(vbool4_t mask, vint8m2_t op1, vint8m2_t op2, size_t vl) { + return __riscv_th_vmseq_vv_i8m2_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i8m2_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmseq_vx_i8m2_b4_m(vbool4_t mask, vint8m2_t op1, int8_t op2, size_t vl) { + return __riscv_th_vmseq_vx_i8m2_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i8m4_b2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmseq_vv_i8m4_b2_m(vbool2_t mask, vint8m4_t op1, 
vint8m4_t op2, size_t vl) { + return __riscv_th_vmseq_vv_i8m4_b2_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i8m4_b2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmseq_vx_i8m4_b2_m(vbool2_t mask, vint8m4_t op1, int8_t op2, size_t vl) { + return __riscv_th_vmseq_vx_i8m4_b2_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i8m8_b1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool1_t test_vmseq_vv_i8m8_b1_m(vbool1_t mask, vint8m8_t op1, vint8m8_t op2, size_t vl) { + return __riscv_th_vmseq_vv_i8m8_b1_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i8m8_b1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool1_t test_vmseq_vx_i8m8_b1_m(vbool1_t mask, vint8m8_t op1, int8_t op2, size_t vl) { + return __riscv_th_vmseq_vx_i8m8_b1_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i16m1_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmseq_vv_i16m1_b16_m(vbool16_t mask, vint16m1_t op1, vint16m1_t op2, size_t vl) { + return __riscv_th_vmseq_vv_i16m1_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i16m1_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmseq_vx_i16m1_b16_m(vbool16_t mask, vint16m1_t op1, int16_t op2, size_t vl) { + return __riscv_th_vmseq_vx_i16m1_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i16m2_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmseq_vv_i16m2_b8_m(vbool8_t mask, vint16m2_t op1, vint16m2_t op2, size_t vl) { + return __riscv_th_vmseq_vv_i16m2_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i16m2_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmseq_vx_i16m2_b8_m(vbool8_t mask, vint16m2_t op1, int16_t op2, size_t vl) { + return __riscv_th_vmseq_vx_i16m2_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i16m4_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmseq_vv_i16m4_b4_m(vbool4_t mask, vint16m4_t op1, vint16m4_t op2, size_t vl) { + return __riscv_th_vmseq_vv_i16m4_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i16m4_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmseq_vx_i16m4_b4_m(vbool4_t mask, vint16m4_t op1, int16_t op2, size_t vl) { + return __riscv_th_vmseq_vx_i16m4_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i16m8_b2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmseq_vv_i16m8_b2_m(vbool2_t mask, vint16m8_t op1, vint16m8_t op2, size_t vl) { + return __riscv_th_vmseq_vv_i16m8_b2_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i16m8_b2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmseq_vx_i16m8_b2_m(vbool2_t mask, vint16m8_t op1, int16_t op2, size_t vl) { + return __riscv_th_vmseq_vx_i16m8_b2_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i32m1_b32_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmseq_vv_i32m1_b32_m(vbool32_t mask, vint32m1_t op1, vint32m1_t op2, size_t vl) { + return __riscv_th_vmseq_vv_i32m1_b32_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i32m1_b32_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmseq_vx_i32m1_b32_m(vbool32_t mask, vint32m1_t op1, int32_t op2, 
size_t vl) { + return __riscv_th_vmseq_vx_i32m1_b32_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i32m2_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmseq_vv_i32m2_b16_m(vbool16_t mask, vint32m2_t op1, vint32m2_t op2, size_t vl) { + return __riscv_th_vmseq_vv_i32m2_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i32m2_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmseq_vx_i32m2_b16_m(vbool16_t mask, vint32m2_t op1, int32_t op2, size_t vl) { + return __riscv_th_vmseq_vx_i32m2_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i32m4_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmseq_vv_i32m4_b8_m(vbool8_t mask, vint32m4_t op1, vint32m4_t op2, size_t vl) { + return __riscv_th_vmseq_vv_i32m4_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i32m4_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmseq_vx_i32m4_b8_m(vbool8_t mask, vint32m4_t op1, int32_t op2, size_t vl) { + return __riscv_th_vmseq_vx_i32m4_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i32m8_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmseq_vv_i32m8_b4_m(vbool4_t mask, vint32m8_t op1, vint32m8_t op2, size_t vl) { + return __riscv_th_vmseq_vv_i32m8_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i32m8_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmseq_vx_i32m8_b4_m(vbool4_t mask, vint32m8_t op1, int32_t op2, size_t vl) { + return __riscv_th_vmseq_vx_i32m8_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i64m1_b64_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: 
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool64_t test_vmseq_vv_i64m1_b64_m(vbool64_t mask, vint64m1_t op1, vint64m1_t op2, size_t vl) { + return __riscv_th_vmseq_vv_i64m1_b64_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i64m1_b64_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool64_t test_vmseq_vx_i64m1_b64_m(vbool64_t mask, vint64m1_t op1, int64_t op2, size_t vl) { + return __riscv_th_vmseq_vx_i64m1_b64_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i64m2_b32_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmseq_vv_i64m2_b32_m(vbool32_t mask, vint64m2_t op1, vint64m2_t op2, size_t vl) { + return __riscv_th_vmseq_vv_i64m2_b32_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i64m2_b32_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmseq_vx_i64m2_b32_m(vbool32_t mask, vint64m2_t op1, int64_t op2, size_t vl) { + return __riscv_th_vmseq_vx_i64m2_b32_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i64m4_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmseq_vv_i64m4_b16_m(vbool16_t mask, vint64m4_t op1, vint64m4_t op2, size_t vl) { + return __riscv_th_vmseq_vv_i64m4_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i64m4_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmseq_vx_i64m4_b16_m(vbool16_t mask, vint64m4_t op1, int64_t op2, size_t vl) { + return __riscv_th_vmseq_vx_i64m4_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_i64m8_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmseq_vv_i64m8_b8_m(vbool8_t mask, vint64m8_t op1, vint64m8_t op2, size_t vl) { + return 
__riscv_th_vmseq_vv_i64m8_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_i64m8_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmseq_vx_i64m8_b8_m(vbool8_t mask, vint64m8_t op1, int64_t op2, size_t vl) { + return __riscv_th_vmseq_vx_i64m8_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u8m1_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmseq_vv_u8m1_b8_m(vbool8_t mask, vuint8m1_t op1, vuint8m1_t op2, size_t vl) { + return __riscv_th_vmseq_vv_u8m1_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u8m1_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmseq_vx_u8m1_b8_m(vbool8_t mask, vuint8m1_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vmseq_vx_u8m1_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u8m2_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmseq_vv_u8m2_b4_m(vbool4_t mask, vuint8m2_t op1, vuint8m2_t op2, size_t vl) { + return __riscv_th_vmseq_vv_u8m2_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u8m2_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmseq_vx_u8m2_b4_m(vbool4_t mask, vuint8m2_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vmseq_vx_u8m2_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u8m4_b2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmseq_vv_u8m4_b2_m(vbool2_t mask, vuint8m4_t op1, vuint8m4_t op2, size_t vl) { + return __riscv_th_vmseq_vv_u8m4_b2_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u8m4_b2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmseq.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmseq_vx_u8m4_b2_m(vbool2_t mask, vuint8m4_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vmseq_vx_u8m4_b2_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u8m8_b1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool1_t test_vmseq_vv_u8m8_b1_m(vbool1_t mask, vuint8m8_t op1, vuint8m8_t op2, size_t vl) { + return __riscv_th_vmseq_vv_u8m8_b1_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u8m8_b1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool1_t test_vmseq_vx_u8m8_b1_m(vbool1_t mask, vuint8m8_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vmseq_vx_u8m8_b1_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u16m1_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmseq_vv_u16m1_b16_m(vbool16_t mask, vuint16m1_t op1, vuint16m1_t op2, size_t vl) { + return __riscv_th_vmseq_vv_u16m1_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u16m1_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmseq_vx_u16m1_b16_m(vbool16_t mask, vuint16m1_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vmseq_vx_u16m1_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u16m2_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmseq_vv_u16m2_b8_m(vbool8_t mask, vuint16m2_t op1, vuint16m2_t op2, size_t vl) { + return __riscv_th_vmseq_vv_u16m2_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u16m2_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmseq_vx_u16m2_b8_m(vbool8_t mask, vuint16m2_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vmseq_vx_u16m2_b8_m(mask, op1, op2, vl); +} 
+ +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u16m4_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmseq_vv_u16m4_b4_m(vbool4_t mask, vuint16m4_t op1, vuint16m4_t op2, size_t vl) { + return __riscv_th_vmseq_vv_u16m4_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u16m4_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmseq_vx_u16m4_b4_m(vbool4_t mask, vuint16m4_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vmseq_vx_u16m4_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u16m8_b2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmseq_vv_u16m8_b2_m(vbool2_t mask, vuint16m8_t op1, vuint16m8_t op2, size_t vl) { + return __riscv_th_vmseq_vv_u16m8_b2_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u16m8_b2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmseq_vx_u16m8_b2_m(vbool2_t mask, vuint16m8_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vmseq_vx_u16m8_b2_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u32m1_b32_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmseq_vv_u32m1_b32_m(vbool32_t mask, vuint32m1_t op1, vuint32m1_t op2, size_t vl) { + return __riscv_th_vmseq_vv_u32m1_b32_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u32m1_b32_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmseq_vx_u32m1_b32_m(vbool32_t mask, vuint32m1_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vmseq_vx_u32m1_b32_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u32m2_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmseq.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmseq_vv_u32m2_b16_m(vbool16_t mask, vuint32m2_t op1, vuint32m2_t op2, size_t vl) { + return __riscv_th_vmseq_vv_u32m2_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u32m2_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmseq_vx_u32m2_b16_m(vbool16_t mask, vuint32m2_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vmseq_vx_u32m2_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u32m4_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmseq_vv_u32m4_b8_m(vbool8_t mask, vuint32m4_t op1, vuint32m4_t op2, size_t vl) { + return __riscv_th_vmseq_vv_u32m4_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u32m4_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmseq_vx_u32m4_b8_m(vbool8_t mask, vuint32m4_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vmseq_vx_u32m4_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u32m8_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmseq_vv_u32m8_b4_m(vbool4_t mask, vuint32m8_t op1, vuint32m8_t op2, size_t vl) { + return __riscv_th_vmseq_vv_u32m8_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u32m8_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmseq_vx_u32m8_b4_m(vbool4_t mask, vuint32m8_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vmseq_vx_u32m8_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u64m1_b64_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool64_t test_vmseq_vv_u64m1_b64_m(vbool64_t mask, vuint64m1_t op1, vuint64m1_t op2, size_t vl) { + return 
__riscv_th_vmseq_vv_u64m1_b64_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u64m1_b64_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool64_t test_vmseq_vx_u64m1_b64_m(vbool64_t mask, vuint64m1_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vmseq_vx_u64m1_b64_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u64m2_b32_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmseq_vv_u64m2_b32_m(vbool32_t mask, vuint64m2_t op1, vuint64m2_t op2, size_t vl) { + return __riscv_th_vmseq_vv_u64m2_b32_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u64m2_b32_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmseq_vx_u64m2_b32_m(vbool32_t mask, vuint64m2_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vmseq_vx_u64m2_b32_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u64m4_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmseq_vv_u64m4_b16_m(vbool16_t mask, vuint64m4_t op1, vuint64m4_t op2, size_t vl) { + return __riscv_th_vmseq_vv_u64m4_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u64m4_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmseq_vx_u64m4_b16_m(vbool16_t mask, vuint64m4_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vmseq_vx_u64m4_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vv_u64m8_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmseq_vv_u64m8_b8_m(vbool8_t mask, vuint64m8_t op1, vuint64m8_t op2, size_t vl) { + return __riscv_th_vmseq_vv_u64m8_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmseq_vx_u64m8_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmseq.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmseq_vx_u64m8_b8_m(vbool8_t mask, vuint64m8_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vmseq_vx_u64m8_b8_m(mask, op1, op2, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsge.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsge.c new file mode 100644 index 00000000000000..8fcaeb15217de6 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsge.c @@ -0,0 +1,647 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i8m1_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv8i8.nxv8i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsge_vv_i8m1_b8(vint8m1_t op1, vint8m1_t op2, size_t vl) { + return __riscv_th_vmsge_vv_i8m1_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i8m1_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv8i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsge_vx_i8m1_b8(vint8m1_t op1, int8_t op2, size_t vl) { + return __riscv_th_vmsge_vx_i8m1_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i8m2_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv16i8.nxv16i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsge_vv_i8m2_b4(vint8m2_t op1, vint8m2_t op2, size_t vl) { + return __riscv_th_vmsge_vv_i8m2_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i8m2_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv16i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsge_vx_i8m2_b4(vint8m2_t op1, int8_t op2, size_t vl) { + return __riscv_th_vmsge_vx_i8m2_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i8m4_b2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv32i8.nxv32i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsge_vv_i8m4_b2(vint8m4_t op1, vint8m4_t op2, size_t vl) { + return __riscv_th_vmsge_vv_i8m4_b2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i8m4_b2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmsge.nxv32i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsge_vx_i8m4_b2(vint8m4_t op1, int8_t op2, size_t vl) { + return __riscv_th_vmsge_vx_i8m4_b2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i8m8_b1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv64i8.nxv64i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool1_t test_vmsge_vv_i8m8_b1(vint8m8_t op1, vint8m8_t op2, size_t vl) { + return __riscv_th_vmsge_vv_i8m8_b1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i8m8_b1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv64i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool1_t test_vmsge_vx_i8m8_b1(vint8m8_t op1, int8_t op2, size_t vl) { + return __riscv_th_vmsge_vx_i8m8_b1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i16m1_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv4i16.nxv4i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsge_vv_i16m1_b16(vint16m1_t op1, vint16m1_t op2, size_t vl) { + return __riscv_th_vmsge_vv_i16m1_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i16m1_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv4i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsge_vx_i16m1_b16(vint16m1_t op1, int16_t op2, size_t vl) { + return __riscv_th_vmsge_vx_i16m1_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i16m2_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv8i16.nxv8i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsge_vv_i16m2_b8(vint16m2_t op1, vint16m2_t op2, size_t vl) { + return __riscv_th_vmsge_vv_i16m2_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i16m2_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv8i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsge_vx_i16m2_b8(vint16m2_t op1, int16_t op2, size_t vl) { + return __riscv_th_vmsge_vx_i16m2_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i16m4_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv16i16.nxv16i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsge_vv_i16m4_b4(vint16m4_t op1, vint16m4_t op2, size_t vl) { + return 
__riscv_th_vmsge_vv_i16m4_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i16m4_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv16i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsge_vx_i16m4_b4(vint16m4_t op1, int16_t op2, size_t vl) { + return __riscv_th_vmsge_vx_i16m4_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i16m8_b2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv32i16.nxv32i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsge_vv_i16m8_b2(vint16m8_t op1, vint16m8_t op2, size_t vl) { + return __riscv_th_vmsge_vv_i16m8_b2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i16m8_b2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv32i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsge_vx_i16m8_b2(vint16m8_t op1, int16_t op2, size_t vl) { + return __riscv_th_vmsge_vx_i16m8_b2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i32m1_b32 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv2i32.nxv2i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsge_vv_i32m1_b32(vint32m1_t op1, vint32m1_t op2, size_t vl) { + return __riscv_th_vmsge_vv_i32m1_b32(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i32m1_b32 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv2i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsge_vx_i32m1_b32(vint32m1_t op1, int32_t op2, size_t vl) { + return __riscv_th_vmsge_vx_i32m1_b32(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i32m2_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv4i32.nxv4i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsge_vv_i32m2_b16(vint32m2_t op1, vint32m2_t op2, size_t vl) { + return __riscv_th_vmsge_vv_i32m2_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i32m2_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv4i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsge_vx_i32m2_b16(vint32m2_t op1, int32_t op2, size_t vl) { + return __riscv_th_vmsge_vx_i32m2_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i32m4_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 
noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv8i32.nxv8i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsge_vv_i32m4_b8(vint32m4_t op1, vint32m4_t op2, size_t vl) { + return __riscv_th_vmsge_vv_i32m4_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i32m4_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv8i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsge_vx_i32m4_b8(vint32m4_t op1, int32_t op2, size_t vl) { + return __riscv_th_vmsge_vx_i32m4_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i32m8_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv16i32.nxv16i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsge_vv_i32m8_b4(vint32m8_t op1, vint32m8_t op2, size_t vl) { + return __riscv_th_vmsge_vv_i32m8_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i32m8_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv16i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsge_vx_i32m8_b4(vint32m8_t op1, int32_t op2, size_t vl) { + return __riscv_th_vmsge_vx_i32m8_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i64m1_b64 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv1i64.nxv1i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool64_t test_vmsge_vv_i64m1_b64(vint64m1_t op1, vint64m1_t op2, size_t vl) { + return __riscv_th_vmsge_vv_i64m1_b64(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i64m1_b64 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv1i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool64_t test_vmsge_vx_i64m1_b64(vint64m1_t op1, int64_t op2, size_t vl) { + return __riscv_th_vmsge_vx_i64m1_b64(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i64m2_b32 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv2i64.nxv2i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsge_vv_i64m2_b32(vint64m2_t op1, vint64m2_t op2, size_t vl) { + return __riscv_th_vmsge_vv_i64m2_b32(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i64m2_b32 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv2i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) +// 
CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsge_vx_i64m2_b32(vint64m2_t op1, int64_t op2, size_t vl) { + return __riscv_th_vmsge_vx_i64m2_b32(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i64m4_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv4i64.nxv4i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsge_vv_i64m4_b16(vint64m4_t op1, vint64m4_t op2, size_t vl) { + return __riscv_th_vmsge_vv_i64m4_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i64m4_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv4i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsge_vx_i64m4_b16(vint64m4_t op1, int64_t op2, size_t vl) { + return __riscv_th_vmsge_vx_i64m4_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i64m8_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv8i64.nxv8i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsge_vv_i64m8_b8(vint64m8_t op1, vint64m8_t op2, size_t vl) { + return __riscv_th_vmsge_vv_i64m8_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i64m8_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.nxv8i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsge_vx_i64m8_b8(vint64m8_t op1, int64_t op2, size_t vl) { + return __riscv_th_vmsge_vx_i64m8_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i8m1_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsge_vv_i8m1_b8_m(vbool8_t mask, vint8m1_t op1, vint8m1_t op2, size_t vl) { + return __riscv_th_vmsge_vv_i8m1_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i8m1_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsge_vx_i8m1_b8_m(vbool8_t mask, vint8m1_t op1, int8_t op2, size_t vl) { + return __riscv_th_vmsge_vx_i8m1_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i8m2_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t 
test_vmsge_vv_i8m2_b4_m(vbool4_t mask, vint8m2_t op1, vint8m2_t op2, size_t vl) { + return __riscv_th_vmsge_vv_i8m2_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i8m2_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsge_vx_i8m2_b4_m(vbool4_t mask, vint8m2_t op1, int8_t op2, size_t vl) { + return __riscv_th_vmsge_vx_i8m2_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i8m4_b2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsge_vv_i8m4_b2_m(vbool2_t mask, vint8m4_t op1, vint8m4_t op2, size_t vl) { + return __riscv_th_vmsge_vv_i8m4_b2_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i8m4_b2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsge_vx_i8m4_b2_m(vbool2_t mask, vint8m4_t op1, int8_t op2, size_t vl) { + return __riscv_th_vmsge_vx_i8m4_b2_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i8m8_b1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool1_t test_vmsge_vv_i8m8_b1_m(vbool1_t mask, vint8m8_t op1, vint8m8_t op2, size_t vl) { + return __riscv_th_vmsge_vv_i8m8_b1_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i8m8_b1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool1_t test_vmsge_vx_i8m8_b1_m(vbool1_t mask, vint8m8_t op1, int8_t op2, size_t vl) { + return __riscv_th_vmsge_vx_i8m8_b1_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i16m1_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsge_vv_i16m1_b16_m(vbool16_t mask, vint16m1_t op1, vint16m1_t op2, size_t vl) { + return __riscv_th_vmsge_vv_i16m1_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i16m1_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) 
#[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsge_vx_i16m1_b16_m(vbool16_t mask, vint16m1_t op1, int16_t op2, size_t vl) { + return __riscv_th_vmsge_vx_i16m1_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i16m2_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsge_vv_i16m2_b8_m(vbool8_t mask, vint16m2_t op1, vint16m2_t op2, size_t vl) { + return __riscv_th_vmsge_vv_i16m2_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i16m2_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsge_vx_i16m2_b8_m(vbool8_t mask, vint16m2_t op1, int16_t op2, size_t vl) { + return __riscv_th_vmsge_vx_i16m2_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i16m4_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsge_vv_i16m4_b4_m(vbool4_t mask, vint16m4_t op1, vint16m4_t op2, size_t vl) { + return __riscv_th_vmsge_vv_i16m4_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i16m4_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsge_vx_i16m4_b4_m(vbool4_t mask, vint16m4_t op1, int16_t op2, size_t vl) { + return __riscv_th_vmsge_vx_i16m4_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i16m8_b2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsge_vv_i16m8_b2_m(vbool2_t mask, vint16m8_t op1, vint16m8_t op2, size_t vl) { + return __riscv_th_vmsge_vv_i16m8_b2_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i16m8_b2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsge_vx_i16m8_b2_m(vbool2_t mask, vint16m8_t 
op1, int16_t op2, size_t vl) { + return __riscv_th_vmsge_vx_i16m8_b2_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i32m1_b32_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsge_vv_i32m1_b32_m(vbool32_t mask, vint32m1_t op1, vint32m1_t op2, size_t vl) { + return __riscv_th_vmsge_vv_i32m1_b32_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i32m1_b32_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsge_vx_i32m1_b32_m(vbool32_t mask, vint32m1_t op1, int32_t op2, size_t vl) { + return __riscv_th_vmsge_vx_i32m1_b32_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i32m2_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsge_vv_i32m2_b16_m(vbool16_t mask, vint32m2_t op1, vint32m2_t op2, size_t vl) { + return __riscv_th_vmsge_vv_i32m2_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i32m2_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsge_vx_i32m2_b16_m(vbool16_t mask, vint32m2_t op1, int32_t op2, size_t vl) { + return __riscv_th_vmsge_vx_i32m2_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i32m4_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsge_vv_i32m4_b8_m(vbool8_t mask, vint32m4_t op1, vint32m4_t op2, size_t vl) { + return __riscv_th_vmsge_vv_i32m4_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i32m4_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsge_vx_i32m4_b8_m(vbool8_t mask, vint32m4_t op1, int32_t op2, size_t vl) { + return __riscv_th_vmsge_vx_i32m4_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i32m8_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsge_vv_i32m8_b4_m(vbool4_t mask, vint32m8_t op1, vint32m8_t op2, size_t vl) { + return __riscv_th_vmsge_vv_i32m8_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i32m8_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsge_vx_i32m8_b4_m(vbool4_t mask, vint32m8_t op1, int32_t op2, size_t vl) { + return __riscv_th_vmsge_vx_i32m8_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i64m1_b64_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool64_t test_vmsge_vv_i64m1_b64_m(vbool64_t mask, vint64m1_t op1, vint64m1_t op2, size_t vl) { + return __riscv_th_vmsge_vv_i64m1_b64_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i64m1_b64_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool64_t test_vmsge_vx_i64m1_b64_m(vbool64_t mask, vint64m1_t op1, int64_t op2, size_t vl) { + return __riscv_th_vmsge_vx_i64m1_b64_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i64m2_b32_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsge_vv_i64m2_b32_m(vbool32_t mask, vint64m2_t op1, vint64m2_t op2, size_t vl) { + return __riscv_th_vmsge_vv_i64m2_b32_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i64m2_b32_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsge_vx_i64m2_b32_m(vbool32_t mask, vint64m2_t op1, int64_t op2, size_t vl) { + return __riscv_th_vmsge_vx_i64m2_b32_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i64m4_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsge_vv_i64m4_b16_m(vbool16_t mask, vint64m4_t op1, vint64m4_t op2, 
size_t vl) { + return __riscv_th_vmsge_vv_i64m4_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i64m4_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsge_vx_i64m4_b16_m(vbool16_t mask, vint64m4_t op1, int64_t op2, size_t vl) { + return __riscv_th_vmsge_vx_i64m4_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vv_i64m8_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsge_vv_i64m8_b8_m(vbool8_t mask, vint64m8_t op1, vint64m8_t op2, size_t vl) { + return __riscv_th_vmsge_vv_i64m8_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsge_vx_i64m8_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsge.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsge_vx_i64m8_b8_m(vbool8_t mask, vint64m8_t op1, int64_t op2, size_t vl) { + return __riscv_th_vmsge_vx_i64m8_b8_m(mask, op1, op2, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsgeu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsgeu.c new file mode 100644 index 00000000000000..b45905416e8441 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsgeu.c @@ -0,0 +1,647 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u8m1_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv8i8.nxv8i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgeu_vv_u8m1_b8(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { + return __riscv_th_vmsgeu_vv_u8m1_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u8m1_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv8i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgeu_vx_u8m1_b8(vuint8m1_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vmsgeu_vx_u8m1_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u8m2_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv16i8.nxv16i8.i64( [[OP1]], 
[[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsgeu_vv_u8m2_b4(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { + return __riscv_th_vmsgeu_vv_u8m2_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u8m2_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv16i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsgeu_vx_u8m2_b4(vuint8m2_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vmsgeu_vx_u8m2_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u8m4_b2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv32i8.nxv32i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsgeu_vv_u8m4_b2(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { + return __riscv_th_vmsgeu_vv_u8m4_b2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u8m4_b2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv32i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsgeu_vx_u8m4_b2(vuint8m4_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vmsgeu_vx_u8m4_b2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u8m8_b1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv64i8.nxv64i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool1_t test_vmsgeu_vv_u8m8_b1(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { + return __riscv_th_vmsgeu_vv_u8m8_b1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u8m8_b1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv64i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool1_t test_vmsgeu_vx_u8m8_b1(vuint8m8_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vmsgeu_vx_u8m8_b1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u16m1_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv4i16.nxv4i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsgeu_vv_u16m1_b16(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { + return __riscv_th_vmsgeu_vv_u16m1_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u16m1_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv4i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsgeu_vx_u16m1_b16(vuint16m1_t op1, uint16_t op2, size_t vl) { + return 
__riscv_th_vmsgeu_vx_u16m1_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u16m2_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv8i16.nxv8i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgeu_vv_u16m2_b8(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { + return __riscv_th_vmsgeu_vv_u16m2_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u16m2_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv8i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgeu_vx_u16m2_b8(vuint16m2_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vmsgeu_vx_u16m2_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u16m4_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv16i16.nxv16i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsgeu_vv_u16m4_b4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { + return __riscv_th_vmsgeu_vv_u16m4_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u16m4_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv16i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsgeu_vx_u16m4_b4(vuint16m4_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vmsgeu_vx_u16m4_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u16m8_b2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv32i16.nxv32i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsgeu_vv_u16m8_b2(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { + return __riscv_th_vmsgeu_vv_u16m8_b2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u16m8_b2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv32i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsgeu_vx_u16m8_b2(vuint16m8_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vmsgeu_vx_u16m8_b2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u32m1_b32 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv2i32.nxv2i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsgeu_vv_u32m1_b32(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { + return __riscv_th_vmsgeu_vv_u32m1_b32(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u32m1_b32 +// CHECK-RV64-SAME: ( [[OP1:%.*]], 
i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv2i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsgeu_vx_u32m1_b32(vuint32m1_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vmsgeu_vx_u32m1_b32(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u32m2_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv4i32.nxv4i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsgeu_vv_u32m2_b16(vuint32m2_t op1, vuint32m2_t op2, size_t vl) { + return __riscv_th_vmsgeu_vv_u32m2_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u32m2_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv4i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsgeu_vx_u32m2_b16(vuint32m2_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vmsgeu_vx_u32m2_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u32m4_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv8i32.nxv8i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgeu_vv_u32m4_b8(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { + return __riscv_th_vmsgeu_vv_u32m4_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u32m4_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv8i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgeu_vx_u32m4_b8(vuint32m4_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vmsgeu_vx_u32m4_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u32m8_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv16i32.nxv16i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsgeu_vv_u32m8_b4(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { + return __riscv_th_vmsgeu_vv_u32m8_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u32m8_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv16i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsgeu_vx_u32m8_b4(vuint32m8_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vmsgeu_vx_u32m8_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u64m1_b64 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmsgeu.nxv1i64.nxv1i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool64_t test_vmsgeu_vv_u64m1_b64(vuint64m1_t op1, vuint64m1_t op2, size_t vl) { + return __riscv_th_vmsgeu_vv_u64m1_b64(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u64m1_b64 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv1i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool64_t test_vmsgeu_vx_u64m1_b64(vuint64m1_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vmsgeu_vx_u64m1_b64(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u64m2_b32 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv2i64.nxv2i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsgeu_vv_u64m2_b32(vuint64m2_t op1, vuint64m2_t op2, size_t vl) { + return __riscv_th_vmsgeu_vv_u64m2_b32(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u64m2_b32 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv2i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsgeu_vx_u64m2_b32(vuint64m2_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vmsgeu_vx_u64m2_b32(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u64m4_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv4i64.nxv4i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsgeu_vv_u64m4_b16(vuint64m4_t op1, vuint64m4_t op2, size_t vl) { + return __riscv_th_vmsgeu_vv_u64m4_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u64m4_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv4i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsgeu_vx_u64m4_b16(vuint64m4_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vmsgeu_vx_u64m4_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u64m8_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv8i64.nxv8i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgeu_vv_u64m8_b8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) { + return __riscv_th_vmsgeu_vv_u64m8_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u64m8_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.nxv8i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t 
test_vmsgeu_vx_u64m8_b8(vuint64m8_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vmsgeu_vx_u64m8_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u8m1_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgeu_vv_u8m1_b8_m(vbool8_t mask, vuint8m1_t op1, vuint8m1_t op2, size_t vl) { + return __riscv_th_vmsgeu_vv_u8m1_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u8m1_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgeu_vx_u8m1_b8_m(vbool8_t mask, vuint8m1_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vmsgeu_vx_u8m1_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u8m2_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsgeu_vv_u8m2_b4_m(vbool4_t mask, vuint8m2_t op1, vuint8m2_t op2, size_t vl) { + return __riscv_th_vmsgeu_vv_u8m2_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u8m2_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsgeu_vx_u8m2_b4_m(vbool4_t mask, vuint8m2_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vmsgeu_vx_u8m2_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u8m4_b2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsgeu_vv_u8m4_b2_m(vbool2_t mask, vuint8m4_t op1, vuint8m4_t op2, size_t vl) { + return __riscv_th_vmsgeu_vv_u8m4_b2_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u8m4_b2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsgeu_vx_u8m4_b2_m(vbool2_t mask, vuint8m4_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vmsgeu_vx_u8m4_b2_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u8m8_b1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { 
+// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool1_t test_vmsgeu_vv_u8m8_b1_m(vbool1_t mask, vuint8m8_t op1, vuint8m8_t op2, size_t vl) { + return __riscv_th_vmsgeu_vv_u8m8_b1_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u8m8_b1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool1_t test_vmsgeu_vx_u8m8_b1_m(vbool1_t mask, vuint8m8_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vmsgeu_vx_u8m8_b1_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u16m1_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsgeu_vv_u16m1_b16_m(vbool16_t mask, vuint16m1_t op1, vuint16m1_t op2, size_t vl) { + return __riscv_th_vmsgeu_vv_u16m1_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u16m1_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsgeu_vx_u16m1_b16_m(vbool16_t mask, vuint16m1_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vmsgeu_vx_u16m1_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u16m2_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgeu_vv_u16m2_b8_m(vbool8_t mask, vuint16m2_t op1, vuint16m2_t op2, size_t vl) { + return __riscv_th_vmsgeu_vv_u16m2_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u16m2_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgeu_vx_u16m2_b8_m(vbool8_t mask, vuint16m2_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vmsgeu_vx_u16m2_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u16m4_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsgeu_vv_u16m4_b4_m(vbool4_t mask, 
vuint16m4_t op1, vuint16m4_t op2, size_t vl) { + return __riscv_th_vmsgeu_vv_u16m4_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u16m4_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsgeu_vx_u16m4_b4_m(vbool4_t mask, vuint16m4_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vmsgeu_vx_u16m4_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u16m8_b2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsgeu_vv_u16m8_b2_m(vbool2_t mask, vuint16m8_t op1, vuint16m8_t op2, size_t vl) { + return __riscv_th_vmsgeu_vv_u16m8_b2_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u16m8_b2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsgeu_vx_u16m8_b2_m(vbool2_t mask, vuint16m8_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vmsgeu_vx_u16m8_b2_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u32m1_b32_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsgeu_vv_u32m1_b32_m(vbool32_t mask, vuint32m1_t op1, vuint32m1_t op2, size_t vl) { + return __riscv_th_vmsgeu_vv_u32m1_b32_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u32m1_b32_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsgeu_vx_u32m1_b32_m(vbool32_t mask, vuint32m1_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vmsgeu_vx_u32m1_b32_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u32m2_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsgeu_vv_u32m2_b16_m(vbool16_t mask, vuint32m2_t op1, vuint32m2_t op2, size_t vl) { + return __riscv_th_vmsgeu_vv_u32m2_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u32m2_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 
noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsgeu_vx_u32m2_b16_m(vbool16_t mask, vuint32m2_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vmsgeu_vx_u32m2_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u32m4_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgeu_vv_u32m4_b8_m(vbool8_t mask, vuint32m4_t op1, vuint32m4_t op2, size_t vl) { + return __riscv_th_vmsgeu_vv_u32m4_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u32m4_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgeu_vx_u32m4_b8_m(vbool8_t mask, vuint32m4_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vmsgeu_vx_u32m4_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u32m8_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsgeu_vv_u32m8_b4_m(vbool4_t mask, vuint32m8_t op1, vuint32m8_t op2, size_t vl) { + return __riscv_th_vmsgeu_vv_u32m8_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u32m8_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsgeu_vx_u32m8_b4_m(vbool4_t mask, vuint32m8_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vmsgeu_vx_u32m8_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u64m1_b64_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool64_t test_vmsgeu_vv_u64m1_b64_m(vbool64_t mask, vuint64m1_t op1, vuint64m1_t op2, size_t vl) { + return __riscv_th_vmsgeu_vv_u64m1_b64_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u64m1_b64_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// 
CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool64_t test_vmsgeu_vx_u64m1_b64_m(vbool64_t mask, vuint64m1_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vmsgeu_vx_u64m1_b64_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u64m2_b32_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsgeu_vv_u64m2_b32_m(vbool32_t mask, vuint64m2_t op1, vuint64m2_t op2, size_t vl) { + return __riscv_th_vmsgeu_vv_u64m2_b32_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u64m2_b32_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsgeu_vx_u64m2_b32_m(vbool32_t mask, vuint64m2_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vmsgeu_vx_u64m2_b32_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u64m4_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsgeu_vv_u64m4_b16_m(vbool16_t mask, vuint64m4_t op1, vuint64m4_t op2, size_t vl) { + return __riscv_th_vmsgeu_vv_u64m4_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u64m4_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsgeu_vx_u64m4_b16_m(vbool16_t mask, vuint64m4_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vmsgeu_vx_u64m4_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vv_u64m8_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgeu_vv_u64m8_b8_m(vbool8_t mask, vuint64m8_t op1, vuint64m8_t op2, size_t vl) { + return __riscv_th_vmsgeu_vv_u64m8_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgeu_vx_u64m8_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgeu.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgeu_vx_u64m8_b8_m(vbool8_t mask, vuint64m8_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vmsgeu_vx_u64m8_b8_m(mask, op1, op2, vl); +} + diff --git 
a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsgt.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsgt.c new file mode 100644 index 00000000000000..7f86c067549734 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsgt.c @@ -0,0 +1,647 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i8m1_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv8i8.nxv8i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgt_vv_i8m1_b8(vint8m1_t op1, vint8m1_t op2, size_t vl) { + return __riscv_th_vmsgt_vv_i8m1_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i8m1_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv8i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgt_vx_i8m1_b8(vint8m1_t op1, int8_t op2, size_t vl) { + return __riscv_th_vmsgt_vx_i8m1_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i8m2_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv16i8.nxv16i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsgt_vv_i8m2_b4(vint8m2_t op1, vint8m2_t op2, size_t vl) { + return __riscv_th_vmsgt_vv_i8m2_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i8m2_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv16i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsgt_vx_i8m2_b4(vint8m2_t op1, int8_t op2, size_t vl) { + return __riscv_th_vmsgt_vx_i8m2_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i8m4_b2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv32i8.nxv32i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsgt_vv_i8m4_b2(vint8m4_t op1, vint8m4_t op2, size_t vl) { + return __riscv_th_vmsgt_vv_i8m4_b2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i8m4_b2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv32i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsgt_vx_i8m4_b2(vint8m4_t op1, int8_t op2, size_t vl) { + return __riscv_th_vmsgt_vx_i8m4_b2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i8m8_b1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv64i8.nxv64i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool1_t test_vmsgt_vv_i8m8_b1(vint8m8_t op1, vint8m8_t op2, size_t vl) { + return __riscv_th_vmsgt_vv_i8m8_b1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i8m8_b1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv64i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool1_t test_vmsgt_vx_i8m8_b1(vint8m8_t op1, int8_t op2, size_t vl) { + return __riscv_th_vmsgt_vx_i8m8_b1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i16m1_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv4i16.nxv4i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsgt_vv_i16m1_b16(vint16m1_t op1, vint16m1_t op2, size_t vl) { + return __riscv_th_vmsgt_vv_i16m1_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i16m1_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv4i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsgt_vx_i16m1_b16(vint16m1_t op1, int16_t op2, size_t vl) { + return __riscv_th_vmsgt_vx_i16m1_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i16m2_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv8i16.nxv8i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgt_vv_i16m2_b8(vint16m2_t op1, vint16m2_t op2, size_t vl) { + return __riscv_th_vmsgt_vv_i16m2_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i16m2_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv8i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgt_vx_i16m2_b8(vint16m2_t op1, int16_t op2, size_t vl) { + return __riscv_th_vmsgt_vx_i16m2_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i16m4_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv16i16.nxv16i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsgt_vv_i16m4_b4(vint16m4_t op1, vint16m4_t op2, size_t vl) { + return __riscv_th_vmsgt_vv_i16m4_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i16m4_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv16i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) +// 
CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsgt_vx_i16m4_b4(vint16m4_t op1, int16_t op2, size_t vl) { + return __riscv_th_vmsgt_vx_i16m4_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i16m8_b2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv32i16.nxv32i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsgt_vv_i16m8_b2(vint16m8_t op1, vint16m8_t op2, size_t vl) { + return __riscv_th_vmsgt_vv_i16m8_b2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i16m8_b2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv32i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsgt_vx_i16m8_b2(vint16m8_t op1, int16_t op2, size_t vl) { + return __riscv_th_vmsgt_vx_i16m8_b2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i32m1_b32 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv2i32.nxv2i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsgt_vv_i32m1_b32(vint32m1_t op1, vint32m1_t op2, size_t vl) { + return __riscv_th_vmsgt_vv_i32m1_b32(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i32m1_b32 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv2i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsgt_vx_i32m1_b32(vint32m1_t op1, int32_t op2, size_t vl) { + return __riscv_th_vmsgt_vx_i32m1_b32(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i32m2_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv4i32.nxv4i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsgt_vv_i32m2_b16(vint32m2_t op1, vint32m2_t op2, size_t vl) { + return __riscv_th_vmsgt_vv_i32m2_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i32m2_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv4i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsgt_vx_i32m2_b16(vint32m2_t op1, int32_t op2, size_t vl) { + return __riscv_th_vmsgt_vx_i32m2_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i32m4_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv8i32.nxv8i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgt_vv_i32m4_b8(vint32m4_t op1, vint32m4_t op2, size_t vl) { + return __riscv_th_vmsgt_vv_i32m4_b8(op1, op2, vl); +} + +// 
CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i32m4_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv8i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgt_vx_i32m4_b8(vint32m4_t op1, int32_t op2, size_t vl) { + return __riscv_th_vmsgt_vx_i32m4_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i32m8_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv16i32.nxv16i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsgt_vv_i32m8_b4(vint32m8_t op1, vint32m8_t op2, size_t vl) { + return __riscv_th_vmsgt_vv_i32m8_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i32m8_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv16i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsgt_vx_i32m8_b4(vint32m8_t op1, int32_t op2, size_t vl) { + return __riscv_th_vmsgt_vx_i32m8_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i64m1_b64 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv1i64.nxv1i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool64_t test_vmsgt_vv_i64m1_b64(vint64m1_t op1, vint64m1_t op2, size_t vl) { + return __riscv_th_vmsgt_vv_i64m1_b64(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i64m1_b64 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv1i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool64_t test_vmsgt_vx_i64m1_b64(vint64m1_t op1, int64_t op2, size_t vl) { + return __riscv_th_vmsgt_vx_i64m1_b64(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i64m2_b32 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv2i64.nxv2i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsgt_vv_i64m2_b32(vint64m2_t op1, vint64m2_t op2, size_t vl) { + return __riscv_th_vmsgt_vv_i64m2_b32(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i64m2_b32 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv2i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsgt_vx_i64m2_b32(vint64m2_t op1, int64_t op2, size_t vl) { + return __riscv_th_vmsgt_vx_i64m2_b32(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i64m4_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv4i64.nxv4i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsgt_vv_i64m4_b16(vint64m4_t op1, vint64m4_t op2, size_t vl) { + return __riscv_th_vmsgt_vv_i64m4_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i64m4_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv4i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsgt_vx_i64m4_b16(vint64m4_t op1, int64_t op2, size_t vl) { + return __riscv_th_vmsgt_vx_i64m4_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i64m8_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv8i64.nxv8i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgt_vv_i64m8_b8(vint64m8_t op1, vint64m8_t op2, size_t vl) { + return __riscv_th_vmsgt_vv_i64m8_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i64m8_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.nxv8i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgt_vx_i64m8_b8(vint64m8_t op1, int64_t op2, size_t vl) { + return __riscv_th_vmsgt_vx_i64m8_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i8m1_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgt_vv_i8m1_b8_m(vbool8_t mask, vint8m1_t op1, vint8m1_t op2, size_t vl) { + return __riscv_th_vmsgt_vv_i8m1_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i8m1_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgt_vx_i8m1_b8_m(vbool8_t mask, vint8m1_t op1, int8_t op2, size_t vl) { + return __riscv_th_vmsgt_vx_i8m1_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i8m2_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsgt_vv_i8m2_b4_m(vbool4_t mask, vint8m2_t op1, vint8m2_t op2, size_t vl) { + return __riscv_th_vmsgt_vv_i8m2_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i8m2_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsgt_vx_i8m2_b4_m(vbool4_t mask, vint8m2_t op1, int8_t op2, size_t vl) { + return __riscv_th_vmsgt_vx_i8m2_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i8m4_b2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsgt_vv_i8m4_b2_m(vbool2_t mask, vint8m4_t op1, vint8m4_t op2, size_t vl) { + return __riscv_th_vmsgt_vv_i8m4_b2_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i8m4_b2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsgt_vx_i8m4_b2_m(vbool2_t mask, vint8m4_t op1, int8_t op2, size_t vl) { + return __riscv_th_vmsgt_vx_i8m4_b2_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i8m8_b1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool1_t test_vmsgt_vv_i8m8_b1_m(vbool1_t mask, vint8m8_t op1, vint8m8_t op2, size_t vl) { + return __riscv_th_vmsgt_vv_i8m8_b1_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i8m8_b1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool1_t test_vmsgt_vx_i8m8_b1_m(vbool1_t mask, vint8m8_t op1, int8_t op2, size_t vl) { + return __riscv_th_vmsgt_vx_i8m8_b1_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i16m1_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsgt_vv_i16m1_b16_m(vbool16_t mask, vint16m1_t op1, vint16m1_t op2, size_t vl) { + return __riscv_th_vmsgt_vv_i16m1_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i16m1_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsgt_vx_i16m1_b16_m(vbool16_t mask, vint16m1_t op1, int16_t op2, size_t vl) { + return __riscv_th_vmsgt_vx_i16m1_b16_m(mask, op1, 
op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i16m2_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgt_vv_i16m2_b8_m(vbool8_t mask, vint16m2_t op1, vint16m2_t op2, size_t vl) { + return __riscv_th_vmsgt_vv_i16m2_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i16m2_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgt_vx_i16m2_b8_m(vbool8_t mask, vint16m2_t op1, int16_t op2, size_t vl) { + return __riscv_th_vmsgt_vx_i16m2_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i16m4_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsgt_vv_i16m4_b4_m(vbool4_t mask, vint16m4_t op1, vint16m4_t op2, size_t vl) { + return __riscv_th_vmsgt_vv_i16m4_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i16m4_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsgt_vx_i16m4_b4_m(vbool4_t mask, vint16m4_t op1, int16_t op2, size_t vl) { + return __riscv_th_vmsgt_vx_i16m4_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i16m8_b2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsgt_vv_i16m8_b2_m(vbool2_t mask, vint16m8_t op1, vint16m8_t op2, size_t vl) { + return __riscv_th_vmsgt_vv_i16m8_b2_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i16m8_b2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsgt_vx_i16m8_b2_m(vbool2_t mask, vint16m8_t op1, int16_t op2, size_t vl) { + return __riscv_th_vmsgt_vx_i16m8_b2_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i32m1_b32_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmsgt.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsgt_vv_i32m1_b32_m(vbool32_t mask, vint32m1_t op1, vint32m1_t op2, size_t vl) { + return __riscv_th_vmsgt_vv_i32m1_b32_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i32m1_b32_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsgt_vx_i32m1_b32_m(vbool32_t mask, vint32m1_t op1, int32_t op2, size_t vl) { + return __riscv_th_vmsgt_vx_i32m1_b32_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i32m2_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsgt_vv_i32m2_b16_m(vbool16_t mask, vint32m2_t op1, vint32m2_t op2, size_t vl) { + return __riscv_th_vmsgt_vv_i32m2_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i32m2_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsgt_vx_i32m2_b16_m(vbool16_t mask, vint32m2_t op1, int32_t op2, size_t vl) { + return __riscv_th_vmsgt_vx_i32m2_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i32m4_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgt_vv_i32m4_b8_m(vbool8_t mask, vint32m4_t op1, vint32m4_t op2, size_t vl) { + return __riscv_th_vmsgt_vv_i32m4_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i32m4_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgt_vx_i32m4_b8_m(vbool8_t mask, vint32m4_t op1, int32_t op2, size_t vl) { + return __riscv_th_vmsgt_vx_i32m4_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i32m8_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsgt_vv_i32m8_b4_m(vbool4_t mask, vint32m8_t op1, vint32m8_t op2, size_t vl) { + return 
__riscv_th_vmsgt_vv_i32m8_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i32m8_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsgt_vx_i32m8_b4_m(vbool4_t mask, vint32m8_t op1, int32_t op2, size_t vl) { + return __riscv_th_vmsgt_vx_i32m8_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i64m1_b64_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool64_t test_vmsgt_vv_i64m1_b64_m(vbool64_t mask, vint64m1_t op1, vint64m1_t op2, size_t vl) { + return __riscv_th_vmsgt_vv_i64m1_b64_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i64m1_b64_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool64_t test_vmsgt_vx_i64m1_b64_m(vbool64_t mask, vint64m1_t op1, int64_t op2, size_t vl) { + return __riscv_th_vmsgt_vx_i64m1_b64_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i64m2_b32_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsgt_vv_i64m2_b32_m(vbool32_t mask, vint64m2_t op1, vint64m2_t op2, size_t vl) { + return __riscv_th_vmsgt_vv_i64m2_b32_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i64m2_b32_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsgt_vx_i64m2_b32_m(vbool32_t mask, vint64m2_t op1, int64_t op2, size_t vl) { + return __riscv_th_vmsgt_vx_i64m2_b32_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i64m4_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsgt_vv_i64m4_b16_m(vbool16_t mask, vint64m4_t op1, vint64m4_t op2, size_t vl) { + return __riscv_th_vmsgt_vv_i64m4_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i64m4_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsgt_vx_i64m4_b16_m(vbool16_t mask, vint64m4_t op1, int64_t op2, size_t vl) { + return __riscv_th_vmsgt_vx_i64m4_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vv_i64m8_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgt_vv_i64m8_b8_m(vbool8_t mask, vint64m8_t op1, vint64m8_t op2, size_t vl) { + return __riscv_th_vmsgt_vv_i64m8_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgt_vx_i64m8_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgt.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgt_vx_i64m8_b8_m(vbool8_t mask, vint64m8_t op1, int64_t op2, size_t vl) { + return __riscv_th_vmsgt_vx_i64m8_b8_m(mask, op1, op2, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsgtu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsgtu.c new file mode 100644 index 00000000000000..4ada9cf864dbba --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsgtu.c @@ -0,0 +1,647 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u8m1_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv8i8.nxv8i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgtu_vv_u8m1_b8(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { + return __riscv_th_vmsgtu_vv_u8m1_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u8m1_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv8i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgtu_vx_u8m1_b8(vuint8m1_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vmsgtu_vx_u8m1_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u8m2_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv16i8.nxv16i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsgtu_vv_u8m2_b4(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { + return __riscv_th_vmsgtu_vv_u8m2_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u8m2_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 
noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv16i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsgtu_vx_u8m2_b4(vuint8m2_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vmsgtu_vx_u8m2_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u8m4_b2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv32i8.nxv32i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsgtu_vv_u8m4_b2(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { + return __riscv_th_vmsgtu_vv_u8m4_b2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u8m4_b2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv32i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsgtu_vx_u8m4_b2(vuint8m4_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vmsgtu_vx_u8m4_b2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u8m8_b1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv64i8.nxv64i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool1_t test_vmsgtu_vv_u8m8_b1(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { + return __riscv_th_vmsgtu_vv_u8m8_b1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u8m8_b1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv64i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool1_t test_vmsgtu_vx_u8m8_b1(vuint8m8_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vmsgtu_vx_u8m8_b1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u16m1_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv4i16.nxv4i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsgtu_vv_u16m1_b16(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { + return __riscv_th_vmsgtu_vv_u16m1_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u16m1_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv4i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsgtu_vx_u16m1_b16(vuint16m1_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vmsgtu_vx_u16m1_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u16m2_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv8i16.nxv8i16.i64( 
[[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgtu_vv_u16m2_b8(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { + return __riscv_th_vmsgtu_vv_u16m2_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u16m2_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv8i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgtu_vx_u16m2_b8(vuint16m2_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vmsgtu_vx_u16m2_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u16m4_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv16i16.nxv16i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsgtu_vv_u16m4_b4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { + return __riscv_th_vmsgtu_vv_u16m4_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u16m4_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv16i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsgtu_vx_u16m4_b4(vuint16m4_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vmsgtu_vx_u16m4_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u16m8_b2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv32i16.nxv32i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsgtu_vv_u16m8_b2(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { + return __riscv_th_vmsgtu_vv_u16m8_b2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u16m8_b2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv32i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsgtu_vx_u16m8_b2(vuint16m8_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vmsgtu_vx_u16m8_b2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u32m1_b32 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv2i32.nxv2i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsgtu_vv_u32m1_b32(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { + return __riscv_th_vmsgtu_vv_u32m1_b32(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u32m1_b32 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv2i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsgtu_vx_u32m1_b32(vuint32m1_t op1, uint32_t 
op2, size_t vl) { + return __riscv_th_vmsgtu_vx_u32m1_b32(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u32m2_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv4i32.nxv4i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsgtu_vv_u32m2_b16(vuint32m2_t op1, vuint32m2_t op2, size_t vl) { + return __riscv_th_vmsgtu_vv_u32m2_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u32m2_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv4i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsgtu_vx_u32m2_b16(vuint32m2_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vmsgtu_vx_u32m2_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u32m4_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv8i32.nxv8i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgtu_vv_u32m4_b8(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { + return __riscv_th_vmsgtu_vv_u32m4_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u32m4_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv8i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgtu_vx_u32m4_b8(vuint32m4_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vmsgtu_vx_u32m4_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u32m8_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv16i32.nxv16i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsgtu_vv_u32m8_b4(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { + return __riscv_th_vmsgtu_vv_u32m8_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u32m8_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv16i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsgtu_vx_u32m8_b4(vuint32m8_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vmsgtu_vx_u32m8_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u64m1_b64 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv1i64.nxv1i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool64_t test_vmsgtu_vv_u64m1_b64(vuint64m1_t op1, vuint64m1_t op2, size_t vl) { + return __riscv_th_vmsgtu_vv_u64m1_b64(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u64m1_b64 +// 
CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv1i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool64_t test_vmsgtu_vx_u64m1_b64(vuint64m1_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vmsgtu_vx_u64m1_b64(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u64m2_b32 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv2i64.nxv2i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsgtu_vv_u64m2_b32(vuint64m2_t op1, vuint64m2_t op2, size_t vl) { + return __riscv_th_vmsgtu_vv_u64m2_b32(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u64m2_b32 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv2i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsgtu_vx_u64m2_b32(vuint64m2_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vmsgtu_vx_u64m2_b32(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u64m4_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv4i64.nxv4i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsgtu_vv_u64m4_b16(vuint64m4_t op1, vuint64m4_t op2, size_t vl) { + return __riscv_th_vmsgtu_vv_u64m4_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u64m4_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv4i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsgtu_vx_u64m4_b16(vuint64m4_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vmsgtu_vx_u64m4_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u64m8_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv8i64.nxv8i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgtu_vv_u64m8_b8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) { + return __riscv_th_vmsgtu_vv_u64m8_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u64m8_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.nxv8i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgtu_vx_u64m8_b8(vuint64m8_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vmsgtu_vx_u64m8_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u8m1_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: 
[[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgtu_vv_u8m1_b8_m(vbool8_t mask, vuint8m1_t op1, vuint8m1_t op2, size_t vl) { + return __riscv_th_vmsgtu_vv_u8m1_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u8m1_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgtu_vx_u8m1_b8_m(vbool8_t mask, vuint8m1_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vmsgtu_vx_u8m1_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u8m2_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsgtu_vv_u8m2_b4_m(vbool4_t mask, vuint8m2_t op1, vuint8m2_t op2, size_t vl) { + return __riscv_th_vmsgtu_vv_u8m2_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u8m2_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsgtu_vx_u8m2_b4_m(vbool4_t mask, vuint8m2_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vmsgtu_vx_u8m2_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u8m4_b2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsgtu_vv_u8m4_b2_m(vbool2_t mask, vuint8m4_t op1, vuint8m4_t op2, size_t vl) { + return __riscv_th_vmsgtu_vv_u8m4_b2_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u8m4_b2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsgtu_vx_u8m4_b2_m(vbool2_t mask, vuint8m4_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vmsgtu_vx_u8m4_b2_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u8m8_b1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool1_t test_vmsgtu_vv_u8m8_b1_m(vbool1_t mask, vuint8m8_t op1, vuint8m8_t op2, size_t vl) { + return __riscv_th_vmsgtu_vv_u8m8_b1_m(mask, op1, 
op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u8m8_b1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool1_t test_vmsgtu_vx_u8m8_b1_m(vbool1_t mask, vuint8m8_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vmsgtu_vx_u8m8_b1_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u16m1_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsgtu_vv_u16m1_b16_m(vbool16_t mask, vuint16m1_t op1, vuint16m1_t op2, size_t vl) { + return __riscv_th_vmsgtu_vv_u16m1_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u16m1_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsgtu_vx_u16m1_b16_m(vbool16_t mask, vuint16m1_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vmsgtu_vx_u16m1_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u16m2_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgtu_vv_u16m2_b8_m(vbool8_t mask, vuint16m2_t op1, vuint16m2_t op2, size_t vl) { + return __riscv_th_vmsgtu_vv_u16m2_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u16m2_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgtu_vx_u16m2_b8_m(vbool8_t mask, vuint16m2_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vmsgtu_vx_u16m2_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u16m4_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsgtu_vv_u16m4_b4_m(vbool4_t mask, vuint16m4_t op1, vuint16m4_t op2, size_t vl) { + return __riscv_th_vmsgtu_vv_u16m4_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u16m4_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsgtu_vx_u16m4_b4_m(vbool4_t mask, vuint16m4_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vmsgtu_vx_u16m4_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u16m8_b2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsgtu_vv_u16m8_b2_m(vbool2_t mask, vuint16m8_t op1, vuint16m8_t op2, size_t vl) { + return __riscv_th_vmsgtu_vv_u16m8_b2_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u16m8_b2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsgtu_vx_u16m8_b2_m(vbool2_t mask, vuint16m8_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vmsgtu_vx_u16m8_b2_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u32m1_b32_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsgtu_vv_u32m1_b32_m(vbool32_t mask, vuint32m1_t op1, vuint32m1_t op2, size_t vl) { + return __riscv_th_vmsgtu_vv_u32m1_b32_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u32m1_b32_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsgtu_vx_u32m1_b32_m(vbool32_t mask, vuint32m1_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vmsgtu_vx_u32m1_b32_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u32m2_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsgtu_vv_u32m2_b16_m(vbool16_t mask, vuint32m2_t op1, vuint32m2_t op2, size_t vl) { + return __riscv_th_vmsgtu_vv_u32m2_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u32m2_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsgtu_vx_u32m2_b16_m(vbool16_t mask, vuint32m2_t 
op1, uint32_t op2, size_t vl) { + return __riscv_th_vmsgtu_vx_u32m2_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u32m4_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgtu_vv_u32m4_b8_m(vbool8_t mask, vuint32m4_t op1, vuint32m4_t op2, size_t vl) { + return __riscv_th_vmsgtu_vv_u32m4_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u32m4_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgtu_vx_u32m4_b8_m(vbool8_t mask, vuint32m4_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vmsgtu_vx_u32m4_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u32m8_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsgtu_vv_u32m8_b4_m(vbool4_t mask, vuint32m8_t op1, vuint32m8_t op2, size_t vl) { + return __riscv_th_vmsgtu_vv_u32m8_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u32m8_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsgtu_vx_u32m8_b4_m(vbool4_t mask, vuint32m8_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vmsgtu_vx_u32m8_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u64m1_b64_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool64_t test_vmsgtu_vv_u64m1_b64_m(vbool64_t mask, vuint64m1_t op1, vuint64m1_t op2, size_t vl) { + return __riscv_th_vmsgtu_vv_u64m1_b64_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u64m1_b64_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool64_t test_vmsgtu_vx_u64m1_b64_m(vbool64_t mask, vuint64m1_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vmsgtu_vx_u64m1_b64_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u64m2_b32_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsgtu_vv_u64m2_b32_m(vbool32_t mask, vuint64m2_t op1, vuint64m2_t op2, size_t vl) { + return __riscv_th_vmsgtu_vv_u64m2_b32_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u64m2_b32_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsgtu_vx_u64m2_b32_m(vbool32_t mask, vuint64m2_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vmsgtu_vx_u64m2_b32_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u64m4_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsgtu_vv_u64m4_b16_m(vbool16_t mask, vuint64m4_t op1, vuint64m4_t op2, size_t vl) { + return __riscv_th_vmsgtu_vv_u64m4_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u64m4_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsgtu_vx_u64m4_b16_m(vbool16_t mask, vuint64m4_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vmsgtu_vx_u64m4_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vv_u64m8_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgtu_vv_u64m8_b8_m(vbool8_t mask, vuint64m8_t op1, vuint64m8_t op2, size_t vl) { + return __riscv_th_vmsgtu_vv_u64m8_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsgtu_vx_u64m8_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsgtu.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsgtu_vx_u64m8_b8_m(vbool8_t mask, vuint64m8_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vmsgtu_vx_u64m8_b8_m(mask, op1, op2, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmslt.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmslt.c new file mode 100644 index 00000000000000..86029adda9de67 --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmslt.c @@ -0,0 +1,647 @@ +// RUN: 
%clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i8m1_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv8i8.nxv8i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmslt_vv_i8m1_b8(vint8m1_t op1, vint8m1_t op2, size_t vl) { + return __riscv_th_vmslt_vv_i8m1_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i8m1_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv8i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmslt_vx_i8m1_b8(vint8m1_t op1, int8_t op2, size_t vl) { + return __riscv_th_vmslt_vx_i8m1_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i8m2_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv16i8.nxv16i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmslt_vv_i8m2_b4(vint8m2_t op1, vint8m2_t op2, size_t vl) { + return __riscv_th_vmslt_vv_i8m2_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i8m2_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv16i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmslt_vx_i8m2_b4(vint8m2_t op1, int8_t op2, size_t vl) { + return __riscv_th_vmslt_vx_i8m2_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i8m4_b2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv32i8.nxv32i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmslt_vv_i8m4_b2(vint8m4_t op1, vint8m4_t op2, size_t vl) { + return __riscv_th_vmslt_vv_i8m4_b2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i8m4_b2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv32i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmslt_vx_i8m4_b2(vint8m4_t op1, int8_t op2, size_t vl) { + return __riscv_th_vmslt_vx_i8m4_b2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i8m8_b1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv64i8.nxv64i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool1_t test_vmslt_vv_i8m8_b1(vint8m8_t op1, vint8m8_t op2, size_t vl) { + return __riscv_th_vmslt_vv_i8m8_b1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local 
@test_vmslt_vx_i8m8_b1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv64i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool1_t test_vmslt_vx_i8m8_b1(vint8m8_t op1, int8_t op2, size_t vl) { + return __riscv_th_vmslt_vx_i8m8_b1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i16m1_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv4i16.nxv4i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmslt_vv_i16m1_b16(vint16m1_t op1, vint16m1_t op2, size_t vl) { + return __riscv_th_vmslt_vv_i16m1_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i16m1_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv4i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmslt_vx_i16m1_b16(vint16m1_t op1, int16_t op2, size_t vl) { + return __riscv_th_vmslt_vx_i16m1_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i16m2_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv8i16.nxv8i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmslt_vv_i16m2_b8(vint16m2_t op1, vint16m2_t op2, size_t vl) { + return __riscv_th_vmslt_vv_i16m2_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i16m2_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv8i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmslt_vx_i16m2_b8(vint16m2_t op1, int16_t op2, size_t vl) { + return __riscv_th_vmslt_vx_i16m2_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i16m4_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv16i16.nxv16i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmslt_vv_i16m4_b4(vint16m4_t op1, vint16m4_t op2, size_t vl) { + return __riscv_th_vmslt_vv_i16m4_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i16m4_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv16i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmslt_vx_i16m4_b4(vint16m4_t op1, int16_t op2, size_t vl) { + return __riscv_th_vmslt_vx_i16m4_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i16m8_b2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmslt.nxv32i16.nxv32i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmslt_vv_i16m8_b2(vint16m8_t op1, vint16m8_t op2, size_t vl) { + return __riscv_th_vmslt_vv_i16m8_b2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i16m8_b2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv32i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmslt_vx_i16m8_b2(vint16m8_t op1, int16_t op2, size_t vl) { + return __riscv_th_vmslt_vx_i16m8_b2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i32m1_b32 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv2i32.nxv2i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmslt_vv_i32m1_b32(vint32m1_t op1, vint32m1_t op2, size_t vl) { + return __riscv_th_vmslt_vv_i32m1_b32(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i32m1_b32 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv2i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmslt_vx_i32m1_b32(vint32m1_t op1, int32_t op2, size_t vl) { + return __riscv_th_vmslt_vx_i32m1_b32(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i32m2_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv4i32.nxv4i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmslt_vv_i32m2_b16(vint32m2_t op1, vint32m2_t op2, size_t vl) { + return __riscv_th_vmslt_vv_i32m2_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i32m2_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv4i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmslt_vx_i32m2_b16(vint32m2_t op1, int32_t op2, size_t vl) { + return __riscv_th_vmslt_vx_i32m2_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i32m4_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv8i32.nxv8i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmslt_vv_i32m4_b8(vint32m4_t op1, vint32m4_t op2, size_t vl) { + return __riscv_th_vmslt_vv_i32m4_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i32m4_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv8i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmslt_vx_i32m4_b8(vint32m4_t op1, int32_t 
op2, size_t vl) { + return __riscv_th_vmslt_vx_i32m4_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i32m8_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv16i32.nxv16i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmslt_vv_i32m8_b4(vint32m8_t op1, vint32m8_t op2, size_t vl) { + return __riscv_th_vmslt_vv_i32m8_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i32m8_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv16i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmslt_vx_i32m8_b4(vint32m8_t op1, int32_t op2, size_t vl) { + return __riscv_th_vmslt_vx_i32m8_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i64m1_b64 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv1i64.nxv1i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool64_t test_vmslt_vv_i64m1_b64(vint64m1_t op1, vint64m1_t op2, size_t vl) { + return __riscv_th_vmslt_vv_i64m1_b64(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i64m1_b64 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv1i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool64_t test_vmslt_vx_i64m1_b64(vint64m1_t op1, int64_t op2, size_t vl) { + return __riscv_th_vmslt_vx_i64m1_b64(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i64m2_b32 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv2i64.nxv2i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmslt_vv_i64m2_b32(vint64m2_t op1, vint64m2_t op2, size_t vl) { + return __riscv_th_vmslt_vv_i64m2_b32(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i64m2_b32 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv2i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmslt_vx_i64m2_b32(vint64m2_t op1, int64_t op2, size_t vl) { + return __riscv_th_vmslt_vx_i64m2_b32(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i64m4_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv4i64.nxv4i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmslt_vv_i64m4_b16(vint64m4_t op1, vint64m4_t op2, size_t vl) { + return __riscv_th_vmslt_vv_i64m4_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i64m4_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef 
[[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv4i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmslt_vx_i64m4_b16(vint64m4_t op1, int64_t op2, size_t vl) { + return __riscv_th_vmslt_vx_i64m4_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i64m8_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv8i64.nxv8i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmslt_vv_i64m8_b8(vint64m8_t op1, vint64m8_t op2, size_t vl) { + return __riscv_th_vmslt_vv_i64m8_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i64m8_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.nxv8i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmslt_vx_i64m8_b8(vint64m8_t op1, int64_t op2, size_t vl) { + return __riscv_th_vmslt_vx_i64m8_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i8m1_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmslt_vv_i8m1_b8_m(vbool8_t mask, vint8m1_t op1, vint8m1_t op2, size_t vl) { + return __riscv_th_vmslt_vv_i8m1_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i8m1_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmslt_vx_i8m1_b8_m(vbool8_t mask, vint8m1_t op1, int8_t op2, size_t vl) { + return __riscv_th_vmslt_vx_i8m1_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i8m2_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmslt_vv_i8m2_b4_m(vbool4_t mask, vint8m2_t op1, vint8m2_t op2, size_t vl) { + return __riscv_th_vmslt_vv_i8m2_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i8m2_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmslt_vx_i8m2_b4_m(vbool4_t mask, vint8m2_t op1, int8_t op2, size_t vl) { + return __riscv_th_vmslt_vx_i8m2_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i8m4_b2_m +// CHECK-RV64-SAME: ( 
[[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmslt_vv_i8m4_b2_m(vbool2_t mask, vint8m4_t op1, vint8m4_t op2, size_t vl) { + return __riscv_th_vmslt_vv_i8m4_b2_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i8m4_b2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmslt_vx_i8m4_b2_m(vbool2_t mask, vint8m4_t op1, int8_t op2, size_t vl) { + return __riscv_th_vmslt_vx_i8m4_b2_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i8m8_b1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool1_t test_vmslt_vv_i8m8_b1_m(vbool1_t mask, vint8m8_t op1, vint8m8_t op2, size_t vl) { + return __riscv_th_vmslt_vv_i8m8_b1_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i8m8_b1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool1_t test_vmslt_vx_i8m8_b1_m(vbool1_t mask, vint8m8_t op1, int8_t op2, size_t vl) { + return __riscv_th_vmslt_vx_i8m8_b1_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i16m1_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmslt_vv_i16m1_b16_m(vbool16_t mask, vint16m1_t op1, vint16m1_t op2, size_t vl) { + return __riscv_th_vmslt_vv_i16m1_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i16m1_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmslt_vx_i16m1_b16_m(vbool16_t mask, vint16m1_t op1, int16_t op2, size_t vl) { + return __riscv_th_vmslt_vx_i16m1_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i16m2_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t 
test_vmslt_vv_i16m2_b8_m(vbool8_t mask, vint16m2_t op1, vint16m2_t op2, size_t vl) { + return __riscv_th_vmslt_vv_i16m2_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i16m2_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmslt_vx_i16m2_b8_m(vbool8_t mask, vint16m2_t op1, int16_t op2, size_t vl) { + return __riscv_th_vmslt_vx_i16m2_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i16m4_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmslt_vv_i16m4_b4_m(vbool4_t mask, vint16m4_t op1, vint16m4_t op2, size_t vl) { + return __riscv_th_vmslt_vv_i16m4_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i16m4_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmslt_vx_i16m4_b4_m(vbool4_t mask, vint16m4_t op1, int16_t op2, size_t vl) { + return __riscv_th_vmslt_vx_i16m4_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i16m8_b2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmslt_vv_i16m8_b2_m(vbool2_t mask, vint16m8_t op1, vint16m8_t op2, size_t vl) { + return __riscv_th_vmslt_vv_i16m8_b2_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i16m8_b2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmslt_vx_i16m8_b2_m(vbool2_t mask, vint16m8_t op1, int16_t op2, size_t vl) { + return __riscv_th_vmslt_vx_i16m8_b2_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i32m1_b32_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmslt_vv_i32m1_b32_m(vbool32_t mask, vint32m1_t op1, vint32m1_t op2, size_t vl) { + return __riscv_th_vmslt_vv_i32m1_b32_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i32m1_b32_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef 
signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmslt_vx_i32m1_b32_m(vbool32_t mask, vint32m1_t op1, int32_t op2, size_t vl) { + return __riscv_th_vmslt_vx_i32m1_b32_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i32m2_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmslt_vv_i32m2_b16_m(vbool16_t mask, vint32m2_t op1, vint32m2_t op2, size_t vl) { + return __riscv_th_vmslt_vv_i32m2_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i32m2_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmslt_vx_i32m2_b16_m(vbool16_t mask, vint32m2_t op1, int32_t op2, size_t vl) { + return __riscv_th_vmslt_vx_i32m2_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i32m4_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmslt_vv_i32m4_b8_m(vbool8_t mask, vint32m4_t op1, vint32m4_t op2, size_t vl) { + return __riscv_th_vmslt_vv_i32m4_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i32m4_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmslt_vx_i32m4_b8_m(vbool8_t mask, vint32m4_t op1, int32_t op2, size_t vl) { + return __riscv_th_vmslt_vx_i32m4_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i32m8_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmslt_vv_i32m8_b4_m(vbool4_t mask, vint32m8_t op1, vint32m8_t op2, size_t vl) { + return __riscv_th_vmslt_vv_i32m8_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i32m8_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t 
test_vmslt_vx_i32m8_b4_m(vbool4_t mask, vint32m8_t op1, int32_t op2, size_t vl) { + return __riscv_th_vmslt_vx_i32m8_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i64m1_b64_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool64_t test_vmslt_vv_i64m1_b64_m(vbool64_t mask, vint64m1_t op1, vint64m1_t op2, size_t vl) { + return __riscv_th_vmslt_vv_i64m1_b64_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i64m1_b64_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool64_t test_vmslt_vx_i64m1_b64_m(vbool64_t mask, vint64m1_t op1, int64_t op2, size_t vl) { + return __riscv_th_vmslt_vx_i64m1_b64_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i64m2_b32_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmslt_vv_i64m2_b32_m(vbool32_t mask, vint64m2_t op1, vint64m2_t op2, size_t vl) { + return __riscv_th_vmslt_vv_i64m2_b32_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i64m2_b32_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmslt_vx_i64m2_b32_m(vbool32_t mask, vint64m2_t op1, int64_t op2, size_t vl) { + return __riscv_th_vmslt_vx_i64m2_b32_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i64m4_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmslt_vv_i64m4_b16_m(vbool16_t mask, vint64m4_t op1, vint64m4_t op2, size_t vl) { + return __riscv_th_vmslt_vv_i64m4_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i64m4_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmslt_vx_i64m4_b16_m(vbool16_t mask, vint64m4_t op1, int64_t op2, size_t vl) { + return __riscv_th_vmslt_vx_i64m4_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vv_i64m8_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef 
[[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmslt_vv_i64m8_b8_m(vbool8_t mask, vint64m8_t op1, vint64m8_t op2, size_t vl) { + return __riscv_th_vmslt_vv_i64m8_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmslt_vx_i64m8_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmslt.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmslt_vx_i64m8_b8_m(vbool8_t mask, vint64m8_t op1, int64_t op2, size_t vl) { + return __riscv_th_vmslt_vx_i64m8_b8_m(mask, op1, op2, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsltu.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsltu.c new file mode 100644 index 00000000000000..4b71242544570a --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsltu.c @@ -0,0 +1,647 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u8m1_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv8i8.nxv8i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsltu_vv_u8m1_b8(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { + return __riscv_th_vmsltu_vv_u8m1_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u8m1_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv8i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsltu_vx_u8m1_b8(vuint8m1_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vmsltu_vx_u8m1_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u8m2_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv16i8.nxv16i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsltu_vv_u8m2_b4(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { + return __riscv_th_vmsltu_vv_u8m2_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u8m2_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv16i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsltu_vx_u8m2_b4(vuint8m2_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vmsltu_vx_u8m2_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u8m4_b2 +// CHECK-RV64-SAME: ( 
[[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv32i8.nxv32i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsltu_vv_u8m4_b2(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { + return __riscv_th_vmsltu_vv_u8m4_b2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u8m4_b2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv32i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsltu_vx_u8m4_b2(vuint8m4_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vmsltu_vx_u8m4_b2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u8m8_b1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv64i8.nxv64i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool1_t test_vmsltu_vv_u8m8_b1(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { + return __riscv_th_vmsltu_vv_u8m8_b1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u8m8_b1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv64i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool1_t test_vmsltu_vx_u8m8_b1(vuint8m8_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vmsltu_vx_u8m8_b1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u16m1_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv4i16.nxv4i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsltu_vv_u16m1_b16(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { + return __riscv_th_vmsltu_vv_u16m1_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u16m1_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv4i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsltu_vx_u16m1_b16(vuint16m1_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vmsltu_vx_u16m1_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u16m2_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv8i16.nxv8i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsltu_vv_u16m2_b8(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { + return __riscv_th_vmsltu_vv_u16m2_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u16m2_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmsltu.nxv8i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsltu_vx_u16m2_b8(vuint16m2_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vmsltu_vx_u16m2_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u16m4_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv16i16.nxv16i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsltu_vv_u16m4_b4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { + return __riscv_th_vmsltu_vv_u16m4_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u16m4_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv16i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsltu_vx_u16m4_b4(vuint16m4_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vmsltu_vx_u16m4_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u16m8_b2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv32i16.nxv32i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsltu_vv_u16m8_b2(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { + return __riscv_th_vmsltu_vv_u16m8_b2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u16m8_b2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv32i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsltu_vx_u16m8_b2(vuint16m8_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vmsltu_vx_u16m8_b2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u32m1_b32 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv2i32.nxv2i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsltu_vv_u32m1_b32(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { + return __riscv_th_vmsltu_vv_u32m1_b32(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u32m1_b32 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv2i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsltu_vx_u32m1_b32(vuint32m1_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vmsltu_vx_u32m1_b32(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u32m2_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv4i32.nxv4i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t 
test_vmsltu_vv_u32m2_b16(vuint32m2_t op1, vuint32m2_t op2, size_t vl) { + return __riscv_th_vmsltu_vv_u32m2_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u32m2_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv4i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsltu_vx_u32m2_b16(vuint32m2_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vmsltu_vx_u32m2_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u32m4_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv8i32.nxv8i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsltu_vv_u32m4_b8(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { + return __riscv_th_vmsltu_vv_u32m4_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u32m4_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv8i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsltu_vx_u32m4_b8(vuint32m4_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vmsltu_vx_u32m4_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u32m8_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv16i32.nxv16i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsltu_vv_u32m8_b4(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { + return __riscv_th_vmsltu_vv_u32m8_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u32m8_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv16i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsltu_vx_u32m8_b4(vuint32m8_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vmsltu_vx_u32m8_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u64m1_b64 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv1i64.nxv1i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool64_t test_vmsltu_vv_u64m1_b64(vuint64m1_t op1, vuint64m1_t op2, size_t vl) { + return __riscv_th_vmsltu_vv_u64m1_b64(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u64m1_b64 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv1i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool64_t test_vmsltu_vx_u64m1_b64(vuint64m1_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vmsltu_vx_u64m1_b64(op1, op2, vl); +} + +// 
CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u64m2_b32 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv2i64.nxv2i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsltu_vv_u64m2_b32(vuint64m2_t op1, vuint64m2_t op2, size_t vl) { + return __riscv_th_vmsltu_vv_u64m2_b32(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u64m2_b32 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv2i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsltu_vx_u64m2_b32(vuint64m2_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vmsltu_vx_u64m2_b32(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u64m4_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv4i64.nxv4i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsltu_vv_u64m4_b16(vuint64m4_t op1, vuint64m4_t op2, size_t vl) { + return __riscv_th_vmsltu_vv_u64m4_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u64m4_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv4i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsltu_vx_u64m4_b16(vuint64m4_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vmsltu_vx_u64m4_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u64m8_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv8i64.nxv8i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsltu_vv_u64m8_b8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) { + return __riscv_th_vmsltu_vv_u64m8_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u64m8_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.nxv8i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsltu_vx_u64m8_b8(vuint64m8_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vmsltu_vx_u64m8_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u8m1_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsltu_vv_u8m1_b8_m(vbool8_t mask, vuint8m1_t op1, vuint8m1_t op2, size_t vl) { + return __riscv_th_vmsltu_vv_u8m1_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u8m1_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 
noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsltu_vx_u8m1_b8_m(vbool8_t mask, vuint8m1_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vmsltu_vx_u8m1_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u8m2_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsltu_vv_u8m2_b4_m(vbool4_t mask, vuint8m2_t op1, vuint8m2_t op2, size_t vl) { + return __riscv_th_vmsltu_vv_u8m2_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u8m2_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsltu_vx_u8m2_b4_m(vbool4_t mask, vuint8m2_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vmsltu_vx_u8m2_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u8m4_b2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsltu_vv_u8m4_b2_m(vbool2_t mask, vuint8m4_t op1, vuint8m4_t op2, size_t vl) { + return __riscv_th_vmsltu_vv_u8m4_b2_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u8m4_b2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsltu_vx_u8m4_b2_m(vbool2_t mask, vuint8m4_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vmsltu_vx_u8m4_b2_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u8m8_b1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool1_t test_vmsltu_vv_u8m8_b1_m(vbool1_t mask, vuint8m8_t op1, vuint8m8_t op2, size_t vl) { + return __riscv_th_vmsltu_vv_u8m8_b1_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u8m8_b1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool1_t 
test_vmsltu_vx_u8m8_b1_m(vbool1_t mask, vuint8m8_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vmsltu_vx_u8m8_b1_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u16m1_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsltu_vv_u16m1_b16_m(vbool16_t mask, vuint16m1_t op1, vuint16m1_t op2, size_t vl) { + return __riscv_th_vmsltu_vv_u16m1_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u16m1_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsltu_vx_u16m1_b16_m(vbool16_t mask, vuint16m1_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vmsltu_vx_u16m1_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u16m2_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsltu_vv_u16m2_b8_m(vbool8_t mask, vuint16m2_t op1, vuint16m2_t op2, size_t vl) { + return __riscv_th_vmsltu_vv_u16m2_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u16m2_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsltu_vx_u16m2_b8_m(vbool8_t mask, vuint16m2_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vmsltu_vx_u16m2_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u16m4_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsltu_vv_u16m4_b4_m(vbool4_t mask, vuint16m4_t op1, vuint16m4_t op2, size_t vl) { + return __riscv_th_vmsltu_vv_u16m4_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u16m4_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsltu_vx_u16m4_b4_m(vbool4_t mask, vuint16m4_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vmsltu_vx_u16m4_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u16m8_b2_m +// CHECK-RV64-SAME: ( 
[[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsltu_vv_u16m8_b2_m(vbool2_t mask, vuint16m8_t op1, vuint16m8_t op2, size_t vl) { + return __riscv_th_vmsltu_vv_u16m8_b2_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u16m8_b2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsltu_vx_u16m8_b2_m(vbool2_t mask, vuint16m8_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vmsltu_vx_u16m8_b2_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u32m1_b32_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsltu_vv_u32m1_b32_m(vbool32_t mask, vuint32m1_t op1, vuint32m1_t op2, size_t vl) { + return __riscv_th_vmsltu_vv_u32m1_b32_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u32m1_b32_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsltu_vx_u32m1_b32_m(vbool32_t mask, vuint32m1_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vmsltu_vx_u32m1_b32_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u32m2_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsltu_vv_u32m2_b16_m(vbool16_t mask, vuint32m2_t op1, vuint32m2_t op2, size_t vl) { + return __riscv_th_vmsltu_vv_u32m2_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u32m2_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsltu_vx_u32m2_b16_m(vbool16_t mask, vuint32m2_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vmsltu_vx_u32m2_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u32m4_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], 
[[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsltu_vv_u32m4_b8_m(vbool8_t mask, vuint32m4_t op1, vuint32m4_t op2, size_t vl) { + return __riscv_th_vmsltu_vv_u32m4_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u32m4_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsltu_vx_u32m4_b8_m(vbool8_t mask, vuint32m4_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vmsltu_vx_u32m4_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u32m8_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsltu_vv_u32m8_b4_m(vbool4_t mask, vuint32m8_t op1, vuint32m8_t op2, size_t vl) { + return __riscv_th_vmsltu_vv_u32m8_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u32m8_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsltu_vx_u32m8_b4_m(vbool4_t mask, vuint32m8_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vmsltu_vx_u32m8_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u64m1_b64_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool64_t test_vmsltu_vv_u64m1_b64_m(vbool64_t mask, vuint64m1_t op1, vuint64m1_t op2, size_t vl) { + return __riscv_th_vmsltu_vv_u64m1_b64_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u64m1_b64_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool64_t test_vmsltu_vx_u64m1_b64_m(vbool64_t mask, vuint64m1_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vmsltu_vx_u64m1_b64_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u64m2_b32_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsltu_vv_u64m2_b32_m(vbool32_t mask, vuint64m2_t op1, vuint64m2_t op2, size_t vl) { + return __riscv_th_vmsltu_vv_u64m2_b32_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: 
define dso_local @test_vmsltu_vx_u64m2_b32_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsltu_vx_u64m2_b32_m(vbool32_t mask, vuint64m2_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vmsltu_vx_u64m2_b32_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u64m4_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsltu_vv_u64m4_b16_m(vbool16_t mask, vuint64m4_t op1, vuint64m4_t op2, size_t vl) { + return __riscv_th_vmsltu_vv_u64m4_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u64m4_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsltu_vx_u64m4_b16_m(vbool16_t mask, vuint64m4_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vmsltu_vx_u64m4_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vv_u64m8_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsltu_vv_u64m8_b8_m(vbool8_t mask, vuint64m8_t op1, vuint64m8_t op2, size_t vl) { + return __riscv_th_vmsltu_vv_u64m8_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsltu_vx_u64m8_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsltu.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsltu_vx_u64m8_b8_m(vbool8_t mask, vuint64m8_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vmsltu_vx_u64m8_b8_m(mask, op1, op2, vl); +} + diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsne.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsne.c new file mode 100644 index 00000000000000..7831154dd082ce --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-compare/thead/vmsne.c @@ -0,0 +1,1287 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i8m1_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = 
call @llvm.riscv.th.vmsne.nxv8i8.nxv8i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsne_vv_i8m1_b8(vint8m1_t op1, vint8m1_t op2, size_t vl) { + return __riscv_th_vmsne_vv_i8m1_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i8m1_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsne_vx_i8m1_b8(vint8m1_t op1, int8_t op2, size_t vl) { + return __riscv_th_vmsne_vx_i8m1_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i8m2_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv16i8.nxv16i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsne_vv_i8m2_b4(vint8m2_t op1, vint8m2_t op2, size_t vl) { + return __riscv_th_vmsne_vv_i8m2_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i8m2_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv16i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsne_vx_i8m2_b4(vint8m2_t op1, int8_t op2, size_t vl) { + return __riscv_th_vmsne_vx_i8m2_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i8m4_b2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv32i8.nxv32i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsne_vv_i8m4_b2(vint8m4_t op1, vint8m4_t op2, size_t vl) { + return __riscv_th_vmsne_vv_i8m4_b2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i8m4_b2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv32i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsne_vx_i8m4_b2(vint8m4_t op1, int8_t op2, size_t vl) { + return __riscv_th_vmsne_vx_i8m4_b2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i8m8_b1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv64i8.nxv64i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool1_t test_vmsne_vv_i8m8_b1(vint8m8_t op1, vint8m8_t op2, size_t vl) { + return __riscv_th_vmsne_vv_i8m8_b1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i8m8_b1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv64i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool1_t test_vmsne_vx_i8m8_b1(vint8m8_t op1, int8_t op2, size_t vl) { + return __riscv_th_vmsne_vx_i8m8_b1(op1, op2, 
vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i16m1_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv4i16.nxv4i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsne_vv_i16m1_b16(vint16m1_t op1, vint16m1_t op2, size_t vl) { + return __riscv_th_vmsne_vv_i16m1_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i16m1_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv4i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsne_vx_i16m1_b16(vint16m1_t op1, int16_t op2, size_t vl) { + return __riscv_th_vmsne_vx_i16m1_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i16m2_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i16.nxv8i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsne_vv_i16m2_b8(vint16m2_t op1, vint16m2_t op2, size_t vl) { + return __riscv_th_vmsne_vv_i16m2_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i16m2_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsne_vx_i16m2_b8(vint16m2_t op1, int16_t op2, size_t vl) { + return __riscv_th_vmsne_vx_i16m2_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i16m4_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv16i16.nxv16i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsne_vv_i16m4_b4(vint16m4_t op1, vint16m4_t op2, size_t vl) { + return __riscv_th_vmsne_vv_i16m4_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i16m4_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv16i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsne_vx_i16m4_b4(vint16m4_t op1, int16_t op2, size_t vl) { + return __riscv_th_vmsne_vx_i16m4_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i16m8_b2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv32i16.nxv32i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsne_vv_i16m8_b2(vint16m8_t op1, vint16m8_t op2, size_t vl) { + return __riscv_th_vmsne_vv_i16m8_b2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i16m8_b2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv32i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsne_vx_i16m8_b2(vint16m8_t op1, int16_t op2, size_t vl) { + return __riscv_th_vmsne_vx_i16m8_b2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i32m1_b32 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv2i32.nxv2i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsne_vv_i32m1_b32(vint32m1_t op1, vint32m1_t op2, size_t vl) { + return __riscv_th_vmsne_vv_i32m1_b32(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i32m1_b32 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv2i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsne_vx_i32m1_b32(vint32m1_t op1, int32_t op2, size_t vl) { + return __riscv_th_vmsne_vx_i32m1_b32(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i32m2_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv4i32.nxv4i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsne_vv_i32m2_b16(vint32m2_t op1, vint32m2_t op2, size_t vl) { + return __riscv_th_vmsne_vv_i32m2_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i32m2_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv4i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsne_vx_i32m2_b16(vint32m2_t op1, int32_t op2, size_t vl) { + return __riscv_th_vmsne_vx_i32m2_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i32m4_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i32.nxv8i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsne_vv_i32m4_b8(vint32m4_t op1, vint32m4_t op2, size_t vl) { + return __riscv_th_vmsne_vv_i32m4_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i32m4_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsne_vx_i32m4_b8(vint32m4_t op1, int32_t op2, size_t vl) { + return __riscv_th_vmsne_vx_i32m4_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i32m8_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv16i32.nxv16i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t 
test_vmsne_vv_i32m8_b4(vint32m8_t op1, vint32m8_t op2, size_t vl) { + return __riscv_th_vmsne_vv_i32m8_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i32m8_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv16i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsne_vx_i32m8_b4(vint32m8_t op1, int32_t op2, size_t vl) { + return __riscv_th_vmsne_vx_i32m8_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i64m1_b64 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv1i64.nxv1i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool64_t test_vmsne_vv_i64m1_b64(vint64m1_t op1, vint64m1_t op2, size_t vl) { + return __riscv_th_vmsne_vv_i64m1_b64(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i64m1_b64 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv1i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool64_t test_vmsne_vx_i64m1_b64(vint64m1_t op1, int64_t op2, size_t vl) { + return __riscv_th_vmsne_vx_i64m1_b64(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i64m2_b32 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv2i64.nxv2i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsne_vv_i64m2_b32(vint64m2_t op1, vint64m2_t op2, size_t vl) { + return __riscv_th_vmsne_vv_i64m2_b32(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i64m2_b32 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv2i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsne_vx_i64m2_b32(vint64m2_t op1, int64_t op2, size_t vl) { + return __riscv_th_vmsne_vx_i64m2_b32(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i64m4_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv4i64.nxv4i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsne_vv_i64m4_b16(vint64m4_t op1, vint64m4_t op2, size_t vl) { + return __riscv_th_vmsne_vv_i64m4_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i64m4_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv4i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsne_vx_i64m4_b16(vint64m4_t op1, int64_t op2, size_t vl) { + return __riscv_th_vmsne_vx_i64m4_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i64m8_b8 
+// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i64.nxv8i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsne_vv_i64m8_b8(vint64m8_t op1, vint64m8_t op2, size_t vl) { + return __riscv_th_vmsne_vv_i64m8_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i64m8_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsne_vx_i64m8_b8(vint64m8_t op1, int64_t op2, size_t vl) { + return __riscv_th_vmsne_vx_i64m8_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u8m1_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i8.nxv8i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsne_vv_u8m1_b8(vuint8m1_t op1, vuint8m1_t op2, size_t vl) { + return __riscv_th_vmsne_vv_u8m1_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u8m1_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsne_vx_u8m1_b8(vuint8m1_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vmsne_vx_u8m1_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u8m2_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv16i8.nxv16i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsne_vv_u8m2_b4(vuint8m2_t op1, vuint8m2_t op2, size_t vl) { + return __riscv_th_vmsne_vv_u8m2_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u8m2_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv16i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsne_vx_u8m2_b4(vuint8m2_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vmsne_vx_u8m2_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u8m4_b2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv32i8.nxv32i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsne_vv_u8m4_b2(vuint8m4_t op1, vuint8m4_t op2, size_t vl) { + return __riscv_th_vmsne_vv_u8m4_b2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u8m4_b2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv32i8.i8.i64( [[OP1]], i8 [[OP2]], 
i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsne_vx_u8m4_b2(vuint8m4_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vmsne_vx_u8m4_b2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u8m8_b1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv64i8.nxv64i8.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool1_t test_vmsne_vv_u8m8_b1(vuint8m8_t op1, vuint8m8_t op2, size_t vl) { + return __riscv_th_vmsne_vv_u8m8_b1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u8m8_b1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv64i8.i8.i64( [[OP1]], i8 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool1_t test_vmsne_vx_u8m8_b1(vuint8m8_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vmsne_vx_u8m8_b1(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u16m1_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv4i16.nxv4i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsne_vv_u16m1_b16(vuint16m1_t op1, vuint16m1_t op2, size_t vl) { + return __riscv_th_vmsne_vv_u16m1_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u16m1_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv4i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsne_vx_u16m1_b16(vuint16m1_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vmsne_vx_u16m1_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u16m2_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i16.nxv8i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsne_vv_u16m2_b8(vuint16m2_t op1, vuint16m2_t op2, size_t vl) { + return __riscv_th_vmsne_vv_u16m2_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u16m2_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsne_vx_u16m2_b8(vuint16m2_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vmsne_vx_u16m2_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u16m4_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv16i16.nxv16i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsne_vv_u16m4_b4(vuint16m4_t op1, vuint16m4_t op2, size_t vl) { + return __riscv_th_vmsne_vv_u16m4_b4(op1, op2, vl); +} + +// 
CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u16m4_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv16i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsne_vx_u16m4_b4(vuint16m4_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vmsne_vx_u16m4_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u16m8_b2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv32i16.nxv32i16.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsne_vv_u16m8_b2(vuint16m8_t op1, vuint16m8_t op2, size_t vl) { + return __riscv_th_vmsne_vv_u16m8_b2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u16m8_b2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv32i16.i16.i64( [[OP1]], i16 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsne_vx_u16m8_b2(vuint16m8_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vmsne_vx_u16m8_b2(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u32m1_b32 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv2i32.nxv2i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsne_vv_u32m1_b32(vuint32m1_t op1, vuint32m1_t op2, size_t vl) { + return __riscv_th_vmsne_vv_u32m1_b32(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u32m1_b32 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv2i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsne_vx_u32m1_b32(vuint32m1_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vmsne_vx_u32m1_b32(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u32m2_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv4i32.nxv4i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsne_vv_u32m2_b16(vuint32m2_t op1, vuint32m2_t op2, size_t vl) { + return __riscv_th_vmsne_vv_u32m2_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u32m2_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv4i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsne_vx_u32m2_b16(vuint32m2_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vmsne_vx_u32m2_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u32m4_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// 
CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i32.nxv8i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsne_vv_u32m4_b8(vuint32m4_t op1, vuint32m4_t op2, size_t vl) { + return __riscv_th_vmsne_vv_u32m4_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u32m4_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsne_vx_u32m4_b8(vuint32m4_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vmsne_vx_u32m4_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u32m8_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv16i32.nxv16i32.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsne_vv_u32m8_b4(vuint32m8_t op1, vuint32m8_t op2, size_t vl) { + return __riscv_th_vmsne_vv_u32m8_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u32m8_b4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv16i32.i32.i64( [[OP1]], i32 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsne_vx_u32m8_b4(vuint32m8_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vmsne_vx_u32m8_b4(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u64m1_b64 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv1i64.nxv1i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool64_t test_vmsne_vv_u64m1_b64(vuint64m1_t op1, vuint64m1_t op2, size_t vl) { + return __riscv_th_vmsne_vv_u64m1_b64(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u64m1_b64 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv1i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool64_t test_vmsne_vx_u64m1_b64(vuint64m1_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vmsne_vx_u64m1_b64(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u64m2_b32 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv2i64.nxv2i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsne_vv_u64m2_b32(vuint64m2_t op1, vuint64m2_t op2, size_t vl) { + return __riscv_th_vmsne_vv_u64m2_b32(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u64m2_b32 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv2i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] 
+// +vbool32_t test_vmsne_vx_u64m2_b32(vuint64m2_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vmsne_vx_u64m2_b32(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u64m4_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv4i64.nxv4i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsne_vv_u64m4_b16(vuint64m4_t op1, vuint64m4_t op2, size_t vl) { + return __riscv_th_vmsne_vv_u64m4_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u64m4_b16 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv4i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsne_vx_u64m4_b16(vuint64m4_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vmsne_vx_u64m4_b16(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u64m8_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i64.nxv8i64.i64( [[OP1]], [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsne_vv_u64m8_b8(vuint64m8_t op1, vuint64m8_t op2, size_t vl) { + return __riscv_th_vmsne_vv_u64m8_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u64m8_b8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.nxv8i64.i64.i64( [[OP1]], i64 [[OP2]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsne_vx_u64m8_b8(vuint64m8_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vmsne_vx_u64m8_b8(op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i8m1_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsne_vv_i8m1_b8_m(vbool8_t mask, vint8m1_t op1, vint8m1_t op2, size_t vl) { + return __riscv_th_vmsne_vv_i8m1_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i8m1_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsne_vx_i8m1_b8_m(vbool8_t mask, vint8m1_t op1, int8_t op2, size_t vl) { + return __riscv_th_vmsne_vx_i8m1_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i8m2_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsne_vv_i8m2_b4_m(vbool4_t mask, 
vint8m2_t op1, vint8m2_t op2, size_t vl) { + return __riscv_th_vmsne_vv_i8m2_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i8m2_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsne_vx_i8m2_b4_m(vbool4_t mask, vint8m2_t op1, int8_t op2, size_t vl) { + return __riscv_th_vmsne_vx_i8m2_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i8m4_b2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsne_vv_i8m4_b2_m(vbool2_t mask, vint8m4_t op1, vint8m4_t op2, size_t vl) { + return __riscv_th_vmsne_vv_i8m4_b2_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i8m4_b2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsne_vx_i8m4_b2_m(vbool2_t mask, vint8m4_t op1, int8_t op2, size_t vl) { + return __riscv_th_vmsne_vx_i8m4_b2_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i8m8_b1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool1_t test_vmsne_vv_i8m8_b1_m(vbool1_t mask, vint8m8_t op1, vint8m8_t op2, size_t vl) { + return __riscv_th_vmsne_vv_i8m8_b1_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i8m8_b1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool1_t test_vmsne_vx_i8m8_b1_m(vbool1_t mask, vint8m8_t op1, int8_t op2, size_t vl) { + return __riscv_th_vmsne_vx_i8m8_b1_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i16m1_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsne_vv_i16m1_b16_m(vbool16_t mask, vint16m1_t op1, vint16m1_t op2, size_t vl) { + return __riscv_th_vmsne_vv_i16m1_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i16m1_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: 
entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsne_vx_i16m1_b16_m(vbool16_t mask, vint16m1_t op1, int16_t op2, size_t vl) { + return __riscv_th_vmsne_vx_i16m1_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i16m2_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsne_vv_i16m2_b8_m(vbool8_t mask, vint16m2_t op1, vint16m2_t op2, size_t vl) { + return __riscv_th_vmsne_vv_i16m2_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i16m2_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsne_vx_i16m2_b8_m(vbool8_t mask, vint16m2_t op1, int16_t op2, size_t vl) { + return __riscv_th_vmsne_vx_i16m2_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i16m4_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsne_vv_i16m4_b4_m(vbool4_t mask, vint16m4_t op1, vint16m4_t op2, size_t vl) { + return __riscv_th_vmsne_vv_i16m4_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i16m4_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsne_vx_i16m4_b4_m(vbool4_t mask, vint16m4_t op1, int16_t op2, size_t vl) { + return __riscv_th_vmsne_vx_i16m4_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i16m8_b2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsne_vv_i16m8_b2_m(vbool2_t mask, vint16m8_t op1, vint16m8_t op2, size_t vl) { + return __riscv_th_vmsne_vv_i16m8_b2_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i16m8_b2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsne_vx_i16m8_b2_m(vbool2_t mask, vint16m8_t op1, int16_t op2, size_t vl) { + return 
__riscv_th_vmsne_vx_i16m8_b2_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i32m1_b32_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsne_vv_i32m1_b32_m(vbool32_t mask, vint32m1_t op1, vint32m1_t op2, size_t vl) { + return __riscv_th_vmsne_vv_i32m1_b32_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i32m1_b32_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsne_vx_i32m1_b32_m(vbool32_t mask, vint32m1_t op1, int32_t op2, size_t vl) { + return __riscv_th_vmsne_vx_i32m1_b32_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i32m2_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsne_vv_i32m2_b16_m(vbool16_t mask, vint32m2_t op1, vint32m2_t op2, size_t vl) { + return __riscv_th_vmsne_vv_i32m2_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i32m2_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsne_vx_i32m2_b16_m(vbool16_t mask, vint32m2_t op1, int32_t op2, size_t vl) { + return __riscv_th_vmsne_vx_i32m2_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i32m4_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsne_vv_i32m4_b8_m(vbool8_t mask, vint32m4_t op1, vint32m4_t op2, size_t vl) { + return __riscv_th_vmsne_vv_i32m4_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i32m4_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsne_vx_i32m4_b8_m(vbool8_t mask, vint32m4_t op1, int32_t op2, size_t vl) { + return __riscv_th_vmsne_vx_i32m4_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i32m8_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// 
CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsne_vv_i32m8_b4_m(vbool4_t mask, vint32m8_t op1, vint32m8_t op2, size_t vl) { + return __riscv_th_vmsne_vv_i32m8_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i32m8_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsne_vx_i32m8_b4_m(vbool4_t mask, vint32m8_t op1, int32_t op2, size_t vl) { + return __riscv_th_vmsne_vx_i32m8_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i64m1_b64_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool64_t test_vmsne_vv_i64m1_b64_m(vbool64_t mask, vint64m1_t op1, vint64m1_t op2, size_t vl) { + return __riscv_th_vmsne_vv_i64m1_b64_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i64m1_b64_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool64_t test_vmsne_vx_i64m1_b64_m(vbool64_t mask, vint64m1_t op1, int64_t op2, size_t vl) { + return __riscv_th_vmsne_vx_i64m1_b64_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i64m2_b32_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsne_vv_i64m2_b32_m(vbool32_t mask, vint64m2_t op1, vint64m2_t op2, size_t vl) { + return __riscv_th_vmsne_vv_i64m2_b32_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i64m2_b32_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsne_vx_i64m2_b32_m(vbool32_t mask, vint64m2_t op1, int64_t op2, size_t vl) { + return __riscv_th_vmsne_vx_i64m2_b32_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i64m4_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsne_vv_i64m4_b16_m(vbool16_t mask, vint64m4_t op1, vint64m4_t op2, size_t vl) { + return 
__riscv_th_vmsne_vv_i64m4_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i64m4_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsne_vx_i64m4_b16_m(vbool16_t mask, vint64m4_t op1, int64_t op2, size_t vl) { + return __riscv_th_vmsne_vx_i64m4_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_i64m8_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsne_vv_i64m8_b8_m(vbool8_t mask, vint64m8_t op1, vint64m8_t op2, size_t vl) { + return __riscv_th_vmsne_vv_i64m8_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_i64m8_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsne_vx_i64m8_b8_m(vbool8_t mask, vint64m8_t op1, int64_t op2, size_t vl) { + return __riscv_th_vmsne_vx_i64m8_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u8m1_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsne_vv_u8m1_b8_m(vbool8_t mask, vuint8m1_t op1, vuint8m1_t op2, size_t vl) { + return __riscv_th_vmsne_vv_u8m1_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u8m1_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsne_vx_u8m1_b8_m(vbool8_t mask, vuint8m1_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vmsne_vx_u8m1_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u8m2_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsne_vv_u8m2_b4_m(vbool4_t mask, vuint8m2_t op1, vuint8m2_t op2, size_t vl) { + return __riscv_th_vmsne_vv_u8m2_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u8m2_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmsne.mask.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsne_vx_u8m2_b4_m(vbool4_t mask, vuint8m2_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vmsne_vx_u8m2_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u8m4_b2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsne_vv_u8m4_b2_m(vbool2_t mask, vuint8m4_t op1, vuint8m4_t op2, size_t vl) { + return __riscv_th_vmsne_vv_u8m4_b2_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u8m4_b2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsne_vx_u8m4_b2_m(vbool2_t mask, vuint8m4_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vmsne_vx_u8m4_b2_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u8m8_b1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool1_t test_vmsne_vv_u8m8_b1_m(vbool1_t mask, vuint8m8_t op1, vuint8m8_t op2, size_t vl) { + return __riscv_th_vmsne_vv_u8m8_b1_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u8m8_b1_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool1_t test_vmsne_vx_u8m8_b1_m(vbool1_t mask, vuint8m8_t op1, uint8_t op2, size_t vl) { + return __riscv_th_vmsne_vx_u8m8_b1_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u16m1_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsne_vv_u16m1_b16_m(vbool16_t mask, vuint16m1_t op1, vuint16m1_t op2, size_t vl) { + return __riscv_th_vmsne_vv_u16m1_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u16m1_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsne_vx_u16m1_b16_m(vbool16_t mask, vuint16m1_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vmsne_vx_u16m1_b16_m(mask, op1, op2, vl); +} + +// 
CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u16m2_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsne_vv_u16m2_b8_m(vbool8_t mask, vuint16m2_t op1, vuint16m2_t op2, size_t vl) { + return __riscv_th_vmsne_vv_u16m2_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u16m2_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsne_vx_u16m2_b8_m(vbool8_t mask, vuint16m2_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vmsne_vx_u16m2_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u16m4_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsne_vv_u16m4_b4_m(vbool4_t mask, vuint16m4_t op1, vuint16m4_t op2, size_t vl) { + return __riscv_th_vmsne_vv_u16m4_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u16m4_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsne_vx_u16m4_b4_m(vbool4_t mask, vuint16m4_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vmsne_vx_u16m4_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u16m8_b2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsne_vv_u16m8_b2_m(vbool2_t mask, vuint16m8_t op1, vuint16m8_t op2, size_t vl) { + return __riscv_th_vmsne_vv_u16m8_b2_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u16m8_b2_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool2_t test_vmsne_vx_u16m8_b2_m(vbool2_t mask, vuint16m8_t op1, uint16_t op2, size_t vl) { + return __riscv_th_vmsne_vx_u16m8_b2_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u32m1_b32_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmsne.mask.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsne_vv_u32m1_b32_m(vbool32_t mask, vuint32m1_t op1, vuint32m1_t op2, size_t vl) { + return __riscv_th_vmsne_vv_u32m1_b32_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u32m1_b32_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsne_vx_u32m1_b32_m(vbool32_t mask, vuint32m1_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vmsne_vx_u32m1_b32_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u32m2_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsne_vv_u32m2_b16_m(vbool16_t mask, vuint32m2_t op1, vuint32m2_t op2, size_t vl) { + return __riscv_th_vmsne_vv_u32m2_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u32m2_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsne_vx_u32m2_b16_m(vbool16_t mask, vuint32m2_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vmsne_vx_u32m2_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u32m4_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsne_vv_u32m4_b8_m(vbool8_t mask, vuint32m4_t op1, vuint32m4_t op2, size_t vl) { + return __riscv_th_vmsne_vv_u32m4_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u32m4_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsne_vx_u32m4_b8_m(vbool8_t mask, vuint32m4_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vmsne_vx_u32m4_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u32m8_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsne_vv_u32m8_b4_m(vbool4_t mask, vuint32m8_t op1, vuint32m8_t op2, size_t vl) { + return 
__riscv_th_vmsne_vv_u32m8_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u32m8_b4_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool4_t test_vmsne_vx_u32m8_b4_m(vbool4_t mask, vuint32m8_t op1, uint32_t op2, size_t vl) { + return __riscv_th_vmsne_vx_u32m8_b4_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u64m1_b64_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool64_t test_vmsne_vv_u64m1_b64_m(vbool64_t mask, vuint64m1_t op1, vuint64m1_t op2, size_t vl) { + return __riscv_th_vmsne_vv_u64m1_b64_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u64m1_b64_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool64_t test_vmsne_vx_u64m1_b64_m(vbool64_t mask, vuint64m1_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vmsne_vx_u64m1_b64_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u64m2_b32_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsne_vv_u64m2_b32_m(vbool32_t mask, vuint64m2_t op1, vuint64m2_t op2, size_t vl) { + return __riscv_th_vmsne_vv_u64m2_b32_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u64m2_b32_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool32_t test_vmsne_vx_u64m2_b32_m(vbool32_t mask, vuint64m2_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vmsne_vx_u64m2_b32_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u64m4_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsne_vv_u64m4_b16_m(vbool16_t mask, vuint64m4_t op1, vuint64m4_t op2, size_t vl) { + return __riscv_th_vmsne_vv_u64m4_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u64m4_b16_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: 
+// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool16_t test_vmsne_vx_u64m4_b16_m(vbool16_t mask, vuint64m4_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vmsne_vx_u64m4_b16_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vv_u64m8_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsne_vv_u64m8_b8_m(vbool8_t mask, vuint64m8_t op1, vuint64m8_t op2, size_t vl) { + return __riscv_th_vmsne_vv_u64m8_b8_m(mask, op1, op2, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmsne_vx_u64m8_b8_m +// CHECK-RV64-SAME: ( [[MASK:%.*]], [[OP1:%.*]], i64 noundef [[OP2:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmsne.mask.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vbool8_t test_vmsne_vx_u64m8_b8_m(vbool8_t mask, vuint64m8_t op1, uint64_t op2, size_t vl) { + return __riscv_th_vmsne_vx_u64m8_b8_m(mask, op1, op2, vl); +} +