From 19f5eaabe39572ebc861bafe68bf617888435091 Mon Sep 17 00:00:00 2001 From: imkiva Date: Wed, 13 Mar 2024 13:37:25 +0800 Subject: [PATCH 1/2] [Clang][XTHeadVector] implement 12.14 `vmerge` --- .../include/clang/Basic/riscv_vector_xtheadv.td | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/clang/include/clang/Basic/riscv_vector_xtheadv.td b/clang/include/clang/Basic/riscv_vector_xtheadv.td index f89b31ee28da1b..8eb5f296fd6072 100644 --- a/clang/include/clang/Basic/riscv_vector_xtheadv.td +++ b/clang/include/clang/Basic/riscv_vector_xtheadv.td @@ -1124,6 +1124,23 @@ let MaskedPolicyScheme = NonePolicy, // 12.13. Vector Widening Integer Multiply-Add Operations // 12.14. Vector Integer Merge Operations +let HasMasked = false, + UnMaskedPolicyScheme = HasPassthruOperand, + MaskedPolicyScheme = NonePolicy, + ManualCodegen = [{ + { + // Builtin: (mask, op1, op2, vl), Intrinsic: (passthru, op1, op2, mask, vl) + if (PolicyAttrs & RVV_VTA) + Ops.insert(Ops.begin(), llvm::PoisonValue::get(ResultType)); // passthru + IntrinsicTypes = {ResultType, Ops[2]->getType(), Ops.back()->getType()}; + } + }] in { + defm th_vmerge : RVVOutOp1BuiltinSet<"vmerge", "csil", + [["vvm", "v", "vvvm"], + ["vxm", "v", "vvem"], + ["vvm", "Uv", "UvUvUvm"], + ["vxm", "Uv", "UvUvUem"]]>; +} // 12.15. Vector Integer Move Operations From 620cfee34514266f58090e2e99d599acdea5add5 Mon Sep 17 00:00:00 2001 From: imkiva Date: Wed, 13 Mar 2024 15:07:35 +0800 Subject: [PATCH 2/2] [Clang][XTHeadVector] test 12.14 `vmerge` --- .../clang/Basic/riscv_vector_xtheadv.td | 2 +- .../vector-integer-merge/thead/vmerge.c | 646 ++++++++++++++++++ 2 files changed, 647 insertions(+), 1 deletion(-) create mode 100644 clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-merge/thead/vmerge.c diff --git a/clang/include/clang/Basic/riscv_vector_xtheadv.td b/clang/include/clang/Basic/riscv_vector_xtheadv.td index 8eb5f296fd6072..9ec203e728d703 100644 --- a/clang/include/clang/Basic/riscv_vector_xtheadv.td +++ b/clang/include/clang/Basic/riscv_vector_xtheadv.td @@ -1135,7 +1135,7 @@ let HasMasked = false, IntrinsicTypes = {ResultType, Ops[2]->getType(), Ops.back()->getType()}; } }] in { - defm th_vmerge : RVVOutOp1BuiltinSet<"vmerge", "csil", + defm th_vmerge : RVVOutOp1BuiltinSet<"th_vmerge", "csil", [["vvm", "v", "vvvm"], ["vxm", "v", "vvem"], ["vvm", "Uv", "UvUvUvm"], diff --git a/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-merge/thead/vmerge.c b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-merge/thead/vmerge.c new file mode 100644 index 00000000000000..8606beaff27dbc --- /dev/null +++ b/clang/test/CodeGen/RISCV/rvv0p71-intrinsics-handcrafted/vector-integer-merge/thead/vmerge.c @@ -0,0 +1,646 @@ +// RUN: %clang_cc1 -triple riscv64 -target-feature +xtheadvector \ +// RUN: -disable-O0-optnone -emit-llvm %s -o - | \ +// RUN: opt -S -passes=mem2reg | \ +// RUN: FileCheck --check-prefix=CHECK-RV64 %s + +#include + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_i8m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m1_t test_vmerge_vvm_i8m1(vint8m1_t op1, vint8m1_t op2, vbool8_t mask, size_t vl) { + return __riscv_th_vmerge_vvm_i8m1(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local 
@test_vmerge_vxm_i8m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m1_t test_vmerge_vxm_i8m1(vint8m1_t op1, int8_t op2, vbool8_t mask, size_t vl) { + return __riscv_th_vmerge_vxm_i8m1(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_i8m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m2_t test_vmerge_vvm_i8m2(vint8m2_t op1, vint8m2_t op2, vbool4_t mask, size_t vl) { + return __riscv_th_vmerge_vvm_i8m2(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_i8m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m2_t test_vmerge_vxm_i8m2(vint8m2_t op1, int8_t op2, vbool4_t mask, size_t vl) { + return __riscv_th_vmerge_vxm_i8m2(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_i8m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m4_t test_vmerge_vvm_i8m4(vint8m4_t op1, vint8m4_t op2, vbool2_t mask, size_t vl) { + return __riscv_th_vmerge_vvm_i8m4(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_i8m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m4_t test_vmerge_vxm_i8m4(vint8m4_t op1, int8_t op2, vbool2_t mask, size_t vl) { + return __riscv_th_vmerge_vxm_i8m4(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_i8m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m8_t test_vmerge_vvm_i8m8(vint8m8_t op1, vint8m8_t op2, vbool1_t mask, size_t vl) { + return __riscv_th_vmerge_vvm_i8m8(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_i8m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint8m8_t test_vmerge_vxm_i8m8(vint8m8_t op1, int8_t op2, 
vbool1_t mask, size_t vl) { + return __riscv_th_vmerge_vxm_i8m8(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_i16m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m1_t test_vmerge_vvm_i16m1(vint16m1_t op1, vint16m1_t op2, vbool16_t mask, size_t vl) { + return __riscv_th_vmerge_vvm_i16m1(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_i16m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m1_t test_vmerge_vxm_i16m1(vint16m1_t op1, int16_t op2, vbool16_t mask, size_t vl) { + return __riscv_th_vmerge_vxm_i16m1(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_i16m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m2_t test_vmerge_vvm_i16m2(vint16m2_t op1, vint16m2_t op2, vbool8_t mask, size_t vl) { + return __riscv_th_vmerge_vvm_i16m2(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_i16m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m2_t test_vmerge_vxm_i16m2(vint16m2_t op1, int16_t op2, vbool8_t mask, size_t vl) { + return __riscv_th_vmerge_vxm_i16m2(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_i16m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m4_t test_vmerge_vvm_i16m4(vint16m4_t op1, vint16m4_t op2, vbool4_t mask, size_t vl) { + return __riscv_th_vmerge_vvm_i16m4(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_i16m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m4_t test_vmerge_vxm_i16m4(vint16m4_t op1, int16_t op2, vbool4_t mask, size_t vl) { + return __riscv_th_vmerge_vxm_i16m4(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_i16m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmerge.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m8_t test_vmerge_vvm_i16m8(vint16m8_t op1, vint16m8_t op2, vbool2_t mask, size_t vl) { + return __riscv_th_vmerge_vvm_i16m8(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_i16m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint16m8_t test_vmerge_vxm_i16m8(vint16m8_t op1, int16_t op2, vbool2_t mask, size_t vl) { + return __riscv_th_vmerge_vxm_i16m8(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_i32m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m1_t test_vmerge_vvm_i32m1(vint32m1_t op1, vint32m1_t op2, vbool32_t mask, size_t vl) { + return __riscv_th_vmerge_vvm_i32m1(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_i32m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m1_t test_vmerge_vxm_i32m1(vint32m1_t op1, int32_t op2, vbool32_t mask, size_t vl) { + return __riscv_th_vmerge_vxm_i32m1(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_i32m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m2_t test_vmerge_vvm_i32m2(vint32m2_t op1, vint32m2_t op2, vbool16_t mask, size_t vl) { + return __riscv_th_vmerge_vvm_i32m2(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_i32m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m2_t test_vmerge_vxm_i32m2(vint32m2_t op1, int32_t op2, vbool16_t mask, size_t vl) { + return __riscv_th_vmerge_vxm_i32m2(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_i32m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m4_t test_vmerge_vvm_i32m4(vint32m4_t op1, vint32m4_t op2, vbool8_t mask, size_t vl) { + return __riscv_th_vmerge_vvm_i32m4(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_i32m4 +// 
CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m4_t test_vmerge_vxm_i32m4(vint32m4_t op1, int32_t op2, vbool8_t mask, size_t vl) { + return __riscv_th_vmerge_vxm_i32m4(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_i32m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m8_t test_vmerge_vvm_i32m8(vint32m8_t op1, vint32m8_t op2, vbool4_t mask, size_t vl) { + return __riscv_th_vmerge_vvm_i32m8(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_i32m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint32m8_t test_vmerge_vxm_i32m8(vint32m8_t op1, int32_t op2, vbool4_t mask, size_t vl) { + return __riscv_th_vmerge_vxm_i32m8(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_i64m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m1_t test_vmerge_vvm_i64m1(vint64m1_t op1, vint64m1_t op2, vbool64_t mask, size_t vl) { + return __riscv_th_vmerge_vvm_i64m1(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_i64m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m1_t test_vmerge_vxm_i64m1(vint64m1_t op1, int64_t op2, vbool64_t mask, size_t vl) { + return __riscv_th_vmerge_vxm_i64m1(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_i64m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m2_t test_vmerge_vvm_i64m2(vint64m2_t op1, vint64m2_t op2, vbool32_t mask, size_t vl) { + return __riscv_th_vmerge_vvm_i64m2(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_i64m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m2_t test_vmerge_vxm_i64m2(vint64m2_t 
op1, int64_t op2, vbool32_t mask, size_t vl) { + return __riscv_th_vmerge_vxm_i64m2(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_i64m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m4_t test_vmerge_vvm_i64m4(vint64m4_t op1, vint64m4_t op2, vbool16_t mask, size_t vl) { + return __riscv_th_vmerge_vvm_i64m4(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_i64m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m4_t test_vmerge_vxm_i64m4(vint64m4_t op1, int64_t op2, vbool16_t mask, size_t vl) { + return __riscv_th_vmerge_vxm_i64m4(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_i64m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m8_t test_vmerge_vvm_i64m8(vint64m8_t op1, vint64m8_t op2, vbool8_t mask, size_t vl) { + return __riscv_th_vmerge_vvm_i64m8(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_i64m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vint64m8_t test_vmerge_vxm_i64m8(vint64m8_t op1, int64_t op2, vbool8_t mask, size_t vl) { + return __riscv_th_vmerge_vxm_i64m8(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_u8m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv8i8.nxv8i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vmerge_vvm_u8m1(vuint8m1_t op1, vuint8m1_t op2, vbool8_t mask, size_t vl) { + return __riscv_th_vmerge_vvm_u8m1(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_u8m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv8i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m1_t test_vmerge_vxm_u8m1(vuint8m1_t op1, uint8_t op2, vbool8_t mask, size_t vl) { + return __riscv_th_vmerge_vxm_u8m1(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_u8m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call 
@llvm.riscv.th.vmerge.nxv16i8.nxv16i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vmerge_vvm_u8m2(vuint8m2_t op1, vuint8m2_t op2, vbool4_t mask, size_t vl) { + return __riscv_th_vmerge_vvm_u8m2(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_u8m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv16i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m2_t test_vmerge_vxm_u8m2(vuint8m2_t op1, uint8_t op2, vbool4_t mask, size_t vl) { + return __riscv_th_vmerge_vxm_u8m2(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_u8m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv32i8.nxv32i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vmerge_vvm_u8m4(vuint8m4_t op1, vuint8m4_t op2, vbool2_t mask, size_t vl) { + return __riscv_th_vmerge_vvm_u8m4(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_u8m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv32i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m4_t test_vmerge_vxm_u8m4(vuint8m4_t op1, uint8_t op2, vbool2_t mask, size_t vl) { + return __riscv_th_vmerge_vxm_u8m4(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_u8m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv64i8.nxv64i8.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vmerge_vvm_u8m8(vuint8m8_t op1, vuint8m8_t op2, vbool1_t mask, size_t vl) { + return __riscv_th_vmerge_vvm_u8m8(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_u8m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i8 noundef zeroext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv64i8.i8.i64( poison, [[OP1]], i8 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint8m8_t test_vmerge_vxm_u8m8(vuint8m8_t op1, uint8_t op2, vbool1_t mask, size_t vl) { + return __riscv_th_vmerge_vxm_u8m8(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_u16m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv4i16.nxv4i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vmerge_vvm_u16m1(vuint16m1_t op1, vuint16m1_t op2, vbool16_t mask, size_t vl) { + return __riscv_th_vmerge_vvm_u16m1(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_u16m1 +// CHECK-RV64-SAME: ( 
[[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv4i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m1_t test_vmerge_vxm_u16m1(vuint16m1_t op1, uint16_t op2, vbool16_t mask, size_t vl) { + return __riscv_th_vmerge_vxm_u16m1(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_u16m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv8i16.nxv8i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vmerge_vvm_u16m2(vuint16m2_t op1, vuint16m2_t op2, vbool8_t mask, size_t vl) { + return __riscv_th_vmerge_vvm_u16m2(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_u16m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv8i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m2_t test_vmerge_vxm_u16m2(vuint16m2_t op1, uint16_t op2, vbool8_t mask, size_t vl) { + return __riscv_th_vmerge_vxm_u16m2(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_u16m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv16i16.nxv16i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vmerge_vvm_u16m4(vuint16m4_t op1, vuint16m4_t op2, vbool4_t mask, size_t vl) { + return __riscv_th_vmerge_vvm_u16m4(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_u16m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv16i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m4_t test_vmerge_vxm_u16m4(vuint16m4_t op1, uint16_t op2, vbool4_t mask, size_t vl) { + return __riscv_th_vmerge_vxm_u16m4(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_u16m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv32i16.nxv32i16.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t test_vmerge_vvm_u16m8(vuint16m8_t op1, vuint16m8_t op2, vbool2_t mask, size_t vl) { + return __riscv_th_vmerge_vvm_u16m8(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_u16m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i16 noundef zeroext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv32i16.i16.i64( poison, [[OP1]], i16 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint16m8_t 
test_vmerge_vxm_u16m8(vuint16m8_t op1, uint16_t op2, vbool2_t mask, size_t vl) { + return __riscv_th_vmerge_vxm_u16m8(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_u32m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv2i32.nxv2i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vmerge_vvm_u32m1(vuint32m1_t op1, vuint32m1_t op2, vbool32_t mask, size_t vl) { + return __riscv_th_vmerge_vvm_u32m1(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_u32m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv2i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m1_t test_vmerge_vxm_u32m1(vuint32m1_t op1, uint32_t op2, vbool32_t mask, size_t vl) { + return __riscv_th_vmerge_vxm_u32m1(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_u32m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv4i32.nxv4i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vmerge_vvm_u32m2(vuint32m2_t op1, vuint32m2_t op2, vbool16_t mask, size_t vl) { + return __riscv_th_vmerge_vvm_u32m2(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_u32m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv4i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m2_t test_vmerge_vxm_u32m2(vuint32m2_t op1, uint32_t op2, vbool16_t mask, size_t vl) { + return __riscv_th_vmerge_vxm_u32m2(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_u32m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv8i32.nxv8i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vmerge_vvm_u32m4(vuint32m4_t op1, vuint32m4_t op2, vbool8_t mask, size_t vl) { + return __riscv_th_vmerge_vvm_u32m4(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_u32m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv8i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m4_t test_vmerge_vxm_u32m4(vuint32m4_t op1, uint32_t op2, vbool8_t mask, size_t vl) { + return __riscv_th_vmerge_vxm_u32m4(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_u32m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: 
entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv16i32.nxv16i32.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vmerge_vvm_u32m8(vuint32m8_t op1, vuint32m8_t op2, vbool4_t mask, size_t vl) { + return __riscv_th_vmerge_vvm_u32m8(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_u32m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i32 noundef signext [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv16i32.i32.i64( poison, [[OP1]], i32 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint32m8_t test_vmerge_vxm_u32m8(vuint32m8_t op1, uint32_t op2, vbool4_t mask, size_t vl) { + return __riscv_th_vmerge_vxm_u32m8(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_u64m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv1i64.nxv1i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vmerge_vvm_u64m1(vuint64m1_t op1, vuint64m1_t op2, vbool64_t mask, size_t vl) { + return __riscv_th_vmerge_vvm_u64m1(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_u64m1 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv1i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m1_t test_vmerge_vxm_u64m1(vuint64m1_t op1, uint64_t op2, vbool64_t mask, size_t vl) { + return __riscv_th_vmerge_vxm_u64m1(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_u64m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv2i64.nxv2i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vmerge_vvm_u64m2(vuint64m2_t op1, vuint64m2_t op2, vbool32_t mask, size_t vl) { + return __riscv_th_vmerge_vvm_u64m2(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_u64m2 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv2i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m2_t test_vmerge_vxm_u64m2(vuint64m2_t op1, uint64_t op2, vbool32_t mask, size_t vl) { + return __riscv_th_vmerge_vxm_u64m2(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_u64m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv4i64.nxv4i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vmerge_vvm_u64m4(vuint64m4_t op1, vuint64m4_t op2, vbool16_t mask, size_t vl) { + return __riscv_th_vmerge_vvm_u64m4(op1, op2, mask, vl); +} + +// 
CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_u64m4 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv4i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m4_t test_vmerge_vxm_u64m4(vuint64m4_t op1, uint64_t op2, vbool16_t mask, size_t vl) { + return __riscv_th_vmerge_vxm_u64m4(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vvm_u64m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv8i64.nxv8i64.i64( poison, [[OP1]], [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vmerge_vvm_u64m8(vuint64m8_t op1, vuint64m8_t op2, vbool8_t mask, size_t vl) { + return __riscv_th_vmerge_vvm_u64m8(op1, op2, mask, vl); +} + +// CHECK-RV64-LABEL: define dso_local @test_vmerge_vxm_u64m8 +// CHECK-RV64-SAME: ( [[OP1:%.*]], i64 noundef [[OP2:%.*]], [[MASK:%.*]], i64 noundef [[VL:%.*]]) #[[ATTR0:[0-9]+]] { +// CHECK-RV64-NEXT: entry: +// CHECK-RV64-NEXT: [[TMP0:%.*]] = call @llvm.riscv.th.vmerge.nxv8i64.i64.i64( poison, [[OP1]], i64 [[OP2]], [[MASK]], i64 [[VL]]) +// CHECK-RV64-NEXT: ret [[TMP0]] +// +vuint64m8_t test_vmerge_vxm_u64m8(vuint64m8_t op1, uint64_t op2, vbool8_t mask, size_t vl) { + return __riscv_th_vmerge_vxm_u64m8(op1, op2, mask, vl); +}
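
For reference, a minimal usage sketch of the C intrinsics these two patches introduce, in the style of the tests above. This is an illustration only, not part of the patch: the helper name select_then_zero is hypothetical, the mask and vl are taken as parameters so the sketch relies only on the th_vmerge builtins added here, and the assumed semantics are those of the standard RVV vmerge (masked-on lanes take the second operand).

#include <riscv_vector.h>

// Hypothetical helper combining the vvm and vxm forms added by this patch.
vint32m1_t select_then_zero(vint32m1_t op1, vint32m1_t op2,
                            vbool32_t mask, size_t vl) {
  // vvm form: lanes where mask=1 take op2, lanes where mask=0 keep op1.
  vint32m1_t sel = __riscv_th_vmerge_vvm_i32m1(op1, op2, mask, vl);
  // vxm form: the same masked-on lanes are then overwritten with the scalar 0.
  return __riscv_th_vmerge_vxm_i32m1(sel, 0, mask, vl);
}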